diff --git a/.forgejo/workflows/build-on-commit.yml b/.forgejo/workflows/build-on-commit.yml deleted file mode 100644 index e8f0d2e3..00000000 --- a/.forgejo/workflows/build-on-commit.yml +++ /dev/null @@ -1,40 +0,0 @@ -name: Build Docker Image on Commit - -on: - push: - branches: - - main - tags: - - '!' # Exclude tags - -jobs: - build-and-publish: - runs-on: docker-builder - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Set REPO_VARS - id: repo-url - run: | - echo "REPO_HOST=$(echo "${{ github.server_url }}" | sed 's~http[s]*://~~g')" >> $GITHUB_ENV - echo "REPO_PATH=${{ github.repository }}" >> $GITHUB_ENV - - - name: Login to OCI registry - run: | - echo "${{ secrets.OCI_TOKEN }}" | docker login $REPO_HOST -u "${{ secrets.OCI_USER }}" --password-stdin - - - name: Build and push Docker images - run: | - # Build Docker image with commit SHA - docker build -t $REPO_HOST/$REPO_PATH:${{ github.sha }} . - docker push $REPO_HOST/$REPO_PATH:${{ github.sha }} - - # Build Docker image with nightly tag - docker tag $REPO_HOST/$REPO_PATH:${{ github.sha }} $REPO_HOST/$REPO_PATH:nightly - docker push $REPO_HOST/$REPO_PATH:nightly - - # Remove local images to save storage - docker rmi $REPO_HOST/$REPO_PATH:${{ github.sha }} - docker rmi $REPO_HOST/$REPO_PATH:nightly diff --git a/.forgejo/workflows/build-on-tag.yml b/.forgejo/workflows/build-on-tag.yml deleted file mode 100644 index 888102b6..00000000 --- a/.forgejo/workflows/build-on-tag.yml +++ /dev/null @@ -1,37 +0,0 @@ -name: Build and Publish Docker Image on Tag - -on: - push: - tags: - - '*' - -jobs: - build-and-publish: - runs-on: docker-builder - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Set REPO_VARS - id: repo-url - run: | - echo "REPO_HOST=$(echo "${{ github.server_url }}" | sed 's~http[s]*://~~g')" >> $GITHUB_ENV - echo "REPO_PATH=${{ github.repository }}" >> $GITHUB_ENV - - - name: Login to OCI registry - run: | - echo "${{ secrets.OCI_TOKEN }}" 
| docker login $REPO_HOST -u "${{ secrets.OCI_USER }}" --password-stdin - - - name: Build and push Docker image - run: | - TAG=${{ github.ref_name }} # Get the tag name from the context - # Build and push multi-platform Docker images - docker build -t $REPO_HOST/$REPO_PATH:$TAG --push . - # Tag and push latest - docker tag $REPO_HOST/$REPO_PATH:$TAG $REPO_HOST/$REPO_PATH:latest - docker push $REPO_HOST/$REPO_PATH:latest - - # Remove the local image to save storage - docker rmi $REPO_HOST/$REPO_PATH:$TAG - docker rmi $REPO_HOST/$REPO_PATH:latest \ No newline at end of file diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml deleted file mode 100644 index aab991d5..00000000 --- a/.github/FUNDING.yml +++ /dev/null @@ -1,10 +0,0 @@ -github: canewsin -patreon: # Replace with a single Patreon username e.g., user1 -open_collective: # Replace with a single Open Collective username e.g., user1 -ko_fi: canewsin -tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel -community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry -liberapay: canewsin -issuehunt: # Replace with a single IssueHunt username e.g., user1 -otechie: # Replace with a single Otechie username e.g., user1 -custom: ['https://paypal.me/PramUkesh', 'https://zerolink.ml/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/'] diff --git a/.github/ISSUE_TEMPLATE/bug-report.md b/.github/ISSUE_TEMPLATE/bug-report.md deleted file mode 100644 index b97ad556..00000000 --- a/.github/ISSUE_TEMPLATE/bug-report.md +++ /dev/null @@ -1,33 +0,0 @@ ---- -name: Bug report -about: Create a report to help us improve ZeroNet -title: '' -labels: '' -assignees: '' - ---- - -### Step 1: Please describe your environment - - * ZeroNet version: _____ - * Operating system: _____ - * Web browser: _____ - * Tor status: not available/always/disabled - * Opened port: yes/no - * Special configuration: ____ - -### Step 2: Describe the problem: - -#### Steps to reproduce: - - 
1. _____ - 2. _____ - 3. _____ - -#### Observed Results: - - * What happened? This could be a screenshot, a description, log output (you can send log/debug.log file to hello@zeronet.io if necessary), etc. - -#### Expected Results: - - * What did you expect to happen? diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index fe7c8178..00000000 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,20 +0,0 @@ ---- -name: Feature request -about: Suggest an idea for ZeroNet -title: '' -labels: '' -assignees: '' - ---- - -**Is your feature request related to a problem? Please describe.** -A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] - -**Describe the solution you'd like** -A clear and concise description of what you want to happen. - -**Describe alternatives you've considered** -A clear and concise description of any alternative solutions or features you've considered. - -**Additional context** -Add any other context or screenshots about the feature request here. diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml deleted file mode 100644 index 27b5c924..00000000 --- a/.github/workflows/codeql-analysis.yml +++ /dev/null @@ -1,72 +0,0 @@ -# For most projects, this workflow file will not need changing; you simply need -# to commit it to your repository. -# -# You may wish to alter this file to override the set of languages analyzed, -# or to provide custom queries or build logic. -# -# ******** NOTE ******** -# We have attempted to detect the languages in your repository. Please check -# the `language` matrix defined below to confirm you have the correct set of -# supported CodeQL languages. 
-# -name: "CodeQL" - -on: - push: - branches: [ py3-latest ] - pull_request: - # The branches below must be a subset of the branches above - branches: [ py3-latest ] - schedule: - - cron: '32 19 * * 2' - -jobs: - analyze: - name: Analyze - runs-on: ubuntu-latest - permissions: - actions: read - contents: read - security-events: write - - strategy: - fail-fast: false - matrix: - language: [ 'javascript', 'python' ] - # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] - # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support - - steps: - - name: Checkout repository - uses: actions/checkout@v3 - - # Initializes the CodeQL tools for scanning. - - name: Initialize CodeQL - uses: github/codeql-action/init@v2 - with: - languages: ${{ matrix.language }} - # If you wish to specify custom queries, you can do so here or in a config file. - # By default, queries listed here will override any specified in a config file. - # Prefix the list here with "+" to use these queries and those in the config file. - - # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs - # queries: security-extended,security-and-quality - - - # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). - # If this step fails, then you should remove it and run the build manually (see below) - - name: Autobuild - uses: github/codeql-action/autobuild@v2 - - # â„šī¸ Command-line programs to run using the OS shell. - # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun - - # If the Autobuild fails above, remove it and uncomment the following three lines. - # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. 
- - # - run: | - # echo "Run, Build Application using script" - # ./location_of_script_within_repo/buildscript.sh - - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml deleted file mode 100644 index 2bdcaf95..00000000 --- a/.github/workflows/tests.yml +++ /dev/null @@ -1,51 +0,0 @@ -name: tests - -on: [push, pull_request] - -jobs: - test: - runs-on: ubuntu-20.04 - strategy: - max-parallel: 16 - matrix: - python-version: ["3.7", "3.8", "3.9"] - - steps: - - name: Checkout ZeroNet - uses: actions/checkout@v2 - with: - submodules: "true" - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - - name: Prepare for installation - run: | - python3 -m pip install setuptools - python3 -m pip install --upgrade pip wheel - python3 -m pip install --upgrade codecov coveralls flake8 mock pytest==4.6.3 pytest-cov selenium - - - name: Install - run: | - python3 -m pip install --upgrade -r requirements.txt - python3 -m pip list - - - name: Prepare for tests - run: | - openssl version -a - echo 0 | sudo tee /proc/sys/net/ipv6/conf/all/disable_ipv6 - - - name: Test - run: | - catchsegv python3 -m pytest src/Test --cov=src --cov-config src/Test/coverage.ini - export ZERONET_LOG_DIR="log/CryptMessage"; catchsegv python3 -m pytest -x plugins/CryptMessage/Test - export ZERONET_LOG_DIR="log/Bigfile"; catchsegv python3 -m pytest -x plugins/Bigfile/Test - export ZERONET_LOG_DIR="log/AnnounceLocal"; catchsegv python3 -m pytest -x plugins/AnnounceLocal/Test - export ZERONET_LOG_DIR="log/OptionalManager"; catchsegv python3 -m pytest -x plugins/OptionalManager/Test - export ZERONET_LOG_DIR="log/Multiuser"; mv plugins/disabled-Multiuser plugins/Multiuser && catchsegv python -m pytest -x plugins/Multiuser/Test - export ZERONET_LOG_DIR="log/Bootstrapper"; mv plugins/disabled-Bootstrapper plugins/Bootstrapper && 
catchsegv python -m pytest -x plugins/Bootstrapper/Test - find src -name "*.json" | xargs -n 1 python3 -c "import json, sys; print(sys.argv[1], end=' '); json.load(open(sys.argv[1])); print('[OK]')" - find plugins -name "*.json" | xargs -n 1 python3 -c "import json, sys; print(sys.argv[1], end=' '); json.load(open(sys.argv[1])); print('[OK]')" - flake8 . --count --select=E9,F63,F72,F82 --show-source --statistics --exclude=src/lib/pyaes/ diff --git a/.gitignore b/.gitignore index 636cd115..76380f5b 100644 --- a/.gitignore +++ b/.gitignore @@ -3,34 +3,13 @@ __pycache__/ *.py[cod] # Log files -**/*.log +*.log # Hidden files .* -!/.forgejo -!/.github !/.gitignore !/.travis.yml -!/.gitlab-ci.yml -# Temporary files -*.bak # Data dir data/* -*.db - -# Virtualenv -env/* - -# Tor data -tools/tor/data - -# PhantomJS, downloaded manually for unit tests -tools/phantomjs - -# ZeroNet config file -zeronet.conf - -# ZeroNet log files -log/* diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml deleted file mode 100644 index f3e1ed29..00000000 --- a/.gitlab-ci.yml +++ /dev/null @@ -1,48 +0,0 @@ -stages: - - test - -.test_template: &test_template - stage: test - before_script: - - pip install --upgrade pip wheel - # Selenium and requests can't be installed without a requests hint on Python 3.4 - - pip install --upgrade requests>=2.22.0 - - pip install --upgrade codecov coveralls flake8 mock pytest==4.6.3 pytest-cov selenium - - pip install --upgrade -r requirements.txt - script: - - pip list - - openssl version -a - - python -m pytest -x plugins/CryptMessage/Test --color=yes - - python -m pytest -x plugins/Bigfile/Test --color=yes - - python -m pytest -x plugins/AnnounceLocal/Test --color=yes - - python -m pytest -x plugins/OptionalManager/Test --color=yes - - python -m pytest src/Test --cov=src --cov-config src/Test/coverage.ini --color=yes - - mv plugins/disabled-Multiuser plugins/Multiuser - - python -m pytest -x plugins/Multiuser/Test --color=yes - - mv plugins/disabled-Bootstrapper 
plugins/Bootstrapper - - python -m pytest -x plugins/Bootstrapper/Test --color=yes - - flake8 . --count --select=E9,F63,F72,F82 --show-source --statistics --exclude=src/lib/pyaes/ - -test:py3.4: - image: python:3.4.3 - <<: *test_template - -test:py3.5: - image: python:3.5.7 - <<: *test_template - -test:py3.6: - image: python:3.6.9 - <<: *test_template - -test:py3.7-openssl1.1.0: - image: python:3.7.0b5 - <<: *test_template - -test:py3.7-openssl1.1.1: - image: python:3.7.4 - <<: *test_template - -test:py3.8: - image: python:3.8.0b3 - <<: *test_template \ No newline at end of file diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index 2c602a5a..00000000 --- a/.gitmodules +++ /dev/null @@ -1,3 +0,0 @@ -[submodule "plugins"] - path = plugins - url = https://github.com/ZeroNetX/ZeroNet-Plugins.git diff --git a/.travis.yml b/.travis.yml index bdaafa22..dfe577ce 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,47 +1,18 @@ language: python -python: - - 3.4 - - 3.5 - - 3.6 - - 3.7 - - 3.8 -services: - - docker cache: pip -before_install: - - pip install --upgrade pip wheel - - pip install --upgrade codecov coveralls flake8 mock pytest==4.6.3 pytest-cov selenium - # - docker build -t zeronet . 
- # - docker run -d -v $PWD:/root/data -p 15441:15441 -p 127.0.0.1:43110:43110 zeronet +python: + - 2.7 install: - - pip install --upgrade -r requirements.txt - - pip list + - pip install -r requirements.txt before_script: - openssl version -a - # Add an IPv6 config - see the corresponding Travis issue - # https://github.com/travis-ci/travis-ci/issues/8361 - - if [ "${TRAVIS_OS_NAME}" == "linux" ]; then - sudo sh -c 'echo 0 > /proc/sys/net/ipv6/conf/all/disable_ipv6'; - fi script: - - catchsegv python -m pytest src/Test --cov=src --cov-config src/Test/coverage.ini - - export ZERONET_LOG_DIR="log/CryptMessage"; catchsegv python -m pytest -x plugins/CryptMessage/Test - - export ZERONET_LOG_DIR="log/Bigfile"; catchsegv python -m pytest -x plugins/Bigfile/Test - - export ZERONET_LOG_DIR="log/AnnounceLocal"; catchsegv python -m pytest -x plugins/AnnounceLocal/Test - - export ZERONET_LOG_DIR="log/OptionalManager"; catchsegv python -m pytest -x plugins/OptionalManager/Test - - export ZERONET_LOG_DIR="log/Multiuser"; mv plugins/disabled-Multiuser plugins/Multiuser && catchsegv python -m pytest -x plugins/Multiuser/Test - - export ZERONET_LOG_DIR="log/Bootstrapper"; mv plugins/disabled-Bootstrapper plugins/Bootstrapper && catchsegv python -m pytest -x plugins/Bootstrapper/Test - - find src -name "*.json" | xargs -n 1 python3 -c "import json, sys; print(sys.argv[1], end=' '); json.load(open(sys.argv[1])); print('[OK]')" - - find plugins -name "*.json" | xargs -n 1 python3 -c "import json, sys; print(sys.argv[1], end=' '); json.load(open(sys.argv[1])); print('[OK]')" - - flake8 . 
--count --select=E9,F63,F72,F82 --show-source --statistics --exclude=src/lib/pyaes/ -after_failure: - - zip -r log.zip log/ - - curl --upload-file ./log.zip https://transfer.sh/log.zip + - python -m pytest plugins/CryptMessage/Test + - python -m pytest src/Test --cov=src --cov-config src/Test/coverage.ini +before_install: + - pip install -U pytest mock pytest-cov + - pip install codecov + - pip install coveralls after_success: - codecov - coveralls --rcfile=src/Test/coverage.ini -notifications: - email: - recipients: - hello@zeronet.io - on_success: change diff --git a/CHANGELOG.md b/CHANGELOG.md deleted file mode 100644 index 6974d18a..00000000 --- a/CHANGELOG.md +++ /dev/null @@ -1,649 +0,0 @@ -### ZeroNet 0.9.0 (2023-07-12) Rev4630 - - Fix RDos Issue in Plugins https://github.com/ZeroNetX/ZeroNet-Plugins/pull/9 - - Add trackers to Config.py for failsafety incase missing trackers.txt - - Added Proxy links - - Fix pysha3 dep installation issue - - FileRequest -> Remove Unnecessary check, Fix error wording - - Fix Response when site is missing for `actionAs` - - -### ZeroNet 0.8.5 (2023-02-12) Rev4625 - - Fix(https://github.com/ZeroNetX/ZeroNet/pull/202) for SSL cert gen failed on Windows. - - default theme-class for missing value in `users.json`. - - Fetch Stats Plugin changes. - -### ZeroNet 0.8.4 (2022-12-12) Rev4620 - - Increase Minimum Site size to 25MB. 
- -### ZeroNet 0.8.3 (2022-12-11) Rev4611 - - main.py -> Fix accessing unassigned varible - - ContentManager -> Support for multiSig - - SiteStrorage.py -> Fix accessing unassigned varible - - ContentManager.py Improve Logging of Valid Signers - -### ZeroNet 0.8.2 (2022-11-01) Rev4610 - - Fix Startup Error when plugins dir missing - - Move trackers to seperate file & Add more trackers - - Config:: Skip loading missing tracker files - - Added documentation for getRandomPort fn - -### ZeroNet 0.8.1 (2022-10-01) Rev4600 - - fix readdress loop (cherry-pick previously added commit from conservancy) - - Remove Patreon badge - - Update README-ru.md (#177) - - Include inner_path of failed request for signing in error msg and response - - Don't Fail Silently When Cert is Not Selected - - Console Log Updates, Specify min supported ZeroNet version for Rust version Protocol Compatibility - - Update FUNDING.yml - -### ZeroNet 0.8.0 (2022-05-27) Rev4591 - - Revert File Open to catch File Access Errors. - -### ZeroNet 0.7.9-patch (2022-05-26) Rev4586 - - Use xescape(s) from zeronet-conservancy - - actionUpdate response Optimisation - - Fetch Plugins Repo Updates - - Fix Unhandled File Access Errors - - Create codeql-analysis.yml - -### ZeroNet 0.7.9 (2022-05-26) Rev4585 - - Rust Version Compatibility for update Protocol msg - - Removed Non Working Trakers. - - Dynamically Load Trackers from Dashboard Site. - - Tracker Supply Improvements. - - Fix Repo Url for Bug Report - - First Party Tracker Update Service using Dashboard Site. 
- - remove old v2 onion service [#158](https://github.com/ZeroNetX/ZeroNet/pull/158) - -### ZeroNet 0.7.8 (2022-03-02) Rev4580 - - Update Plugins with some bug fixes and Improvements - -### ZeroNet 0.7.6 (2022-01-12) Rev4565 - - Sync Plugin Updates - - Clean up tor v3 patch [#115](https://github.com/ZeroNetX/ZeroNet/pull/115) - - Add More Default Plugins to Repo - - Doubled Site Publish Limits - - Update ZeroNet Repo Urls [#103](https://github.com/ZeroNetX/ZeroNet/pull/103) - - UI/UX: Increases Size of Notifications Close Button [#106](https://github.com/ZeroNetX/ZeroNet/pull/106) - - Moved Plugins to Seperate Repo - - Added `access_key` variable in Config, this used to access restrited plugins when multiuser plugin is enabled. When MultiUserPlugin is enabled we cannot access some pages like /Stats, this key will remove such restriction with access key. - - Added `last_connection_id_current_version` to ConnectionServer, helpful to estimate no of connection from current client version. - - Added current version: connections to /Stats page. see the previous point. - -### ZeroNet 0.7.5 (2021-11-28) Rev4560 - - Add more default trackers - - Change default homepage address to `1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d` - - Change default update site address to `1Update8crprmciJHwp2WXqkx2c4iYp18` - -### ZeroNet 0.7.3 (2021-11-28) Rev4555 - - Fix xrange is undefined error - - Fix Incorrect viewport on mobile while loading - - Tor-V3 Patch by anonymoose - - -### ZeroNet 0.7.1 (2019-07-01) Rev4206 -### Added - - Built-in logging console in the web UI to see what's happening in the background. (pull down top-right 0 button to see it) - - Display database rebuild errors [Thanks to Lola] - - New plugin system that allows to install and manage builtin/third party extensions to the ZeroNet client using the web interface. 
- - Support multiple trackers_file - - Add OpenSSL 1.1 support to CryptMessage plugin based on Bitmessage modifications [Thanks to radfish] - - Display visual error message on startup errors - - Fix max opened files changing on Windows platform - - Display TLS1.3 compatibility on /Stats page - - Add fake SNI and ALPN to peer connections to make it more like standard https connections - - Hide and ignore tracker_proxy setting in Tor: Always mode as it's going to use Tor anyway. - - Deny websocket connections from unknown origins - - Restrict open_browser values to avoid RCE on sandbox escape - - Offer access web interface by IP address in case of unknown host - - Link to site's sidebar with "#ZeroNet:OpenSidebar" hash - -### Changed - - Allow .. in file names [Thanks to imachug] - - Change unstable trackers - - More clean errors on sites.json/users.json load error - - Various tweaks for tracker rating on unstable connections - - Use OpenSSL 1.1 dlls from default Python Windows distribution if possible - - Re-factor domain resolving for easier domain plugins - - Disable UDP connections if --proxy is used - - New, decorator-based Websocket API permission system to avoid future typo mistakes - -### Fixed - - Fix parsing config lines that have no value - - Fix start.py [Thanks to imachug] - - Allow multiple values of the same key in the config file [Thanks ssdifnskdjfnsdjk for reporting] - - Fix parsing config file lines that has % in the value [Thanks slrslr for reporting] - - Fix bootstrapper plugin hash reloads [Thanks geekless for reporting] - - Fix CryptMessage plugin OpenSSL dll loading on Windows (ZeroMail errors) [Thanks cxgreat2014 for reporting] - - Fix startup error when using OpenSSL 1.1 [Thanks to imachug] - - Fix a bug that did not loaded merged site data for 5 sec after the merged site got added - - Fix typo that allowed to add new plugins in public proxy mode. 
[Thanks styromaniac for reporting] - - Fix loading non-big files with "|all" postfix [Thanks to krzotr] - - Fix OpenSSL cert generation error crash by change Windows console encoding to utf8 - -#### Wrapper html injection vulnerability [Reported by ivanq] - -In ZeroNet before rev4188 the wrapper template variables was rendered incorrectly. - -Result: The opened site was able to gain WebSocket connection with unrestricted ADMIN/NOSANDBOX access, change configuration values and possible RCE on client's machine. - -Fix: Fixed the template rendering code, disallowed WebSocket connections from unknown locations, restricted open_browser configuration values to avoid possible RCE in case of sandbox escape. - -Note: The fix is also back ported to ZeroNet Py 2.x version (Rev3870) - - -### ZeroNet 0.7.0 (2019-06-12) Rev4106 (First release targeting Python 3.4+) -### Added - - 5-10x faster signature verification by using libsecp256k1 (Thanks to ZeroMux) - - Generated SSL certificate randomization to avoid protocol filters (Thanks to ValdikSS) - - Offline mode - - P2P source code update using ZeroNet protocol - - ecdsaSign/Verify commands to CryptMessage plugin (Thanks to imachug) - - Efficient file rename: change file names instead of re-downloading the file. 
- - Make redirect optional on site cloning (Thanks to Lola) - - EccPrivToPub / EccPubToPriv functions (Thanks to imachug) - - Detect and change dark/light theme based on OS setting (Thanks to filips123) - -### Changed - - Re-factored code to Python3 runtime (compatible with Python 3.4-3.8) - - More safe database sync mode - - Removed bundled third-party libraries where it's possible - - Use lang=en instead of lang={lang} in urls to avoid url encode problems - - Remove environment details from error page - - Don't push content.json updates larger than 10kb to significantly reduce bw usage for site with many files - -### Fixed - - Fix sending files with \0 characters - - Security fix: Escape error detail to avoid XSS (reported by krzotr) - - Fix signature verification using libsecp256k1 for compressed addresses (mostly certificates generated in the browser) - - Fix newsfeed if you have more than 1000 followed topic/post on one site. - - Fix site download as zip file - - Fix displaying sites with utf8 title - - Error message if dbRebuild fails (Thanks to Lola) - - Fix browser reopen if executing start.py again. 
(Thanks to imachug) - - -### ZeroNet 0.6.5 (2019-02-16) Rev3851 (Last release targeting Python 2.7.x) -### Added - - IPv6 support in peer exchange, bigfiles, optional file finding, tracker sharing, socket listening and connecting (based on tangdou1 modifications) - - New tracker database format with IPv6 support - - Display notification if there is an unpublished modification for your site - - Listen and shut down normally for SIGTERM (Thanks to blurHY) - - Support tilde `~` in filenames (by d14na) - - Support map for Namecoin subdomain names (Thanks to lola) - - Add log level to config page - - Support `{data}` for data dir variable in trackers_file value - - Quick check content.db on startup and rebuild if necessary - - Don't show meek proxy option if the tor client does not supports it - -### Changed - - Refactored port open checking with IPv6 support - - Consider non-local IPs as external even is the open port check fails (for CJDNS and Yggdrasil support) - - Add IPv6 tracker and change unstable tracker - - Don't correct sent local time with the calculated time correction - - Disable CSP for Edge - - Only support CREATE commands in dbschema indexes node and SELECT from storage.query - -### Fixed - - Check the length of master seed when executing cryptGetPrivatekey CLI command - - Only reload source code on file modification / creation - - Detection and issue warning for latest no-script plugin - - Fix atomic write of a non-existent file - - Fix sql queries with lots of variables and sites with lots of content.json - - Fix multi-line parsing of zeronet.conf - - Fix site deletion from users.json - - Fix site cloning before site downloaded (Reported by unsystemizer) - - Fix queryJson for non-list nodes (Reported by MingchenZhang) - - -## ZeroNet 0.6.4 (2018-10-20) Rev3660 -### Added - - New plugin: UiConfig. A web interface that allows changing ZeroNet settings. - - New plugin: AnnounceShare. 
Share trackers between users, automatically announce client's ip as tracker if Bootstrapper plugin is enabled. - - Global tracker stats on ZeroHello: Include statistics from all served sites instead of displaying request statistics only for one site. - - Support custom proxy for trackers. (Configurable with /Config) - - Adding peers to sites manually using zeronet_peers get parameter - - Copy site address with peers link on the sidebar. - - Zip file listing and streaming support for Bigfiles. - - Tracker statistics on /Stats page - - Peer reputation save/restore to speed up sync time after startup. - - Full support fileGet, fileList, dirList calls on tar.gz/zip files. - - Archived_before support to user content rules to allow deletion of all user files before the specified date - - Show and manage "Connecting" sites on ZeroHello - - Add theme support to ZeroNet sites - - Dark theme for ZeroHello, ZeroBlog, ZeroTalk - -### Changed - - Dynamic big file allocation: More efficient storage usage by don't pre-allocate the whole file at the beginning, but expand the size as the content downloads. - - Reduce the request frequency to unreliable trackers. - - Only allow 5 concurrent checkSites to run in parallel to reduce load under Tor/slow connection. - - Stop site downloading if it reached 95% of site limit to avoid download loop for sites out of limit - - The pinned optional files won't be removed from download queue after 30 retries and won't be deleted even if the site owner removes it. - - Don't remove incomplete (downloading) sites on startup - - Remove --pin_bigfile argument as big files are automatically excluded from optional files limit. - -### Fixed - - Trayicon compatibility with latest gevent - - Request number counting for zero:// trackers - - Peer reputation boost for zero:// trackers. 
- - Blocklist of peers loaded from peerdb (Thanks tangdou1 for report) - - Sidebar map loading on foreign languages (Thx tangdou1 for report) - - FileGet on non-existent files (Thanks mcdev for reporting) - - Peer connecting bug for sites with low amount of peers - -#### "The Vacation" Sandbox escape bug [Reported by GitCenter / Krixano / ZeroLSTN] - -In ZeroNet 0.6.3 Rev3615 and earlier as a result of invalid file type detection, a malicious site could escape the iframe sandbox. - -Result: Browser iframe sandbox escape - -Applied fix: Replaced the previous, file extension based file type identification with a proper one. - -Affected versions: All versions before ZeroNet Rev3616 - - -## ZeroNet 0.6.3 (2018-06-26) -### Added - - New plugin: ContentFilter that allows to have shared site and user block list. - - Support Tor meek proxies to avoid tracker blocking of GFW - - Detect network level tracker blocking and easy setting meek proxy for tracker connections. - - Support downloading 2GB+ sites as .zip (Thx to Radtoo) - - Support ZeroNet as a transparent proxy (Thx to JeremyRand) - - Allow fileQuery as CORS command (Thx to imachug) - - Windows distribution includes Tor and meek client by default - - Download sites as zip link to sidebar - - File server port randomization - - Implicit SSL for all connection - - fileList API command for zip files - - Auto download bigfiles size limit on sidebar - - Local peer number to the sidebar - - Open site directory button in sidebar - -### Changed - - Switched to Azure Tor meek proxy as Amazon one became unavailable - - Refactored/rewritten tracker connection manager - - Improved peer discovery for optional files without opened port - - Also delete Bigfile's piecemap on deletion - -### Fixed - - Important security issue: Iframe sandbox escape [Reported by Ivanq / gitcenter] - - Local peer discovery when running multiple clients on the same machine - - Uploading small files with Bigfile plugin - - Ctrl-c shutdown when running CLI 
commands - - High CPU/IO usage when Multiuser plugin enabled - - Firefox back button - - Peer discovery on older Linux kernels - - Optional file handling when multiple files have the same hash_id (first 4 chars of the hash) - - Msgpack 0.5.5 and 0.5.6 compatibility - -## ZeroNet 0.6.2 (2018-02-18) - -### Added - - New plugin: AnnounceLocal to make ZeroNet work without an internet connection on the local network. - - Allow dbQuey and userGetSettings using the `as` API command on different sites with Cors permission - - New config option: `--log_level` to reduce log verbosity and IO load - - Prefer to connect to recent peers from trackers first - - Mark peers with port 1 is also unconnectable for future fix for trackers that do not support port 0 announce - -### Changed - - Don't keep connection for sites that have not been modified in the last week - - Change unreliable trackers to new ones - - Send maximum 10 findhash request in one find optional files round (15sec) - - Change "Unique to site" to "No certificate" for default option in cert selection dialog. 
- - Dont print warnings if not in debug mode - - Generalized tracker logging format - - Only recover sites from sites.json if they had peers - - Message from local peers does not means internet connection - - Removed `--debug_gevent` and turned on Gevent block logging by default - -### Fixed - - Limit connections to 512 to avoid reaching 1024 limit on windows - - Exception when logging foreign operating system socket errors - - Don't send private (local) IPs on pex - - Don't connect to private IPs in tor always mode - - Properly recover data from msgpack unpacker on file stream start - - Symlinked data directory deletion when deleting site using Windows - - De-duplicate peers before publishing - - Bigfile info for non-existing files - - -## ZeroNet 0.6.1 (2018-01-25) - -### Added - - New plugin: Chart - - Collect and display charts about your contribution to ZeroNet network - - Allow list as argument replacement in sql queries. (Thanks to imachug) - - Newsfeed query time statistics (Click on "From XX sites in X.Xs on ZeroHello) - - New UiWebsocket API command: As to run commands as other site - - Ranged ajax queries for big files - - Filter feed by type and site address - - FileNeed, Bigfile upload command compatibility with merger sites - - Send event on port open / tor status change - - More description on permission request - -### Changed - - Reduce memory usage of sidebar geoip database cache - - Change unreliable tracker to new one - - Don't display Cors permission ask if it already granted - - Avoid UI blocking when rebuilding a merger site - - Skip listing ignored directories on signing - - In Multiuser mode show the seed welcome message when adding new certificate instead of first visit - - Faster async port opening on multiple network interfaces - - Allow javascript modals - - Only zoom sidebar globe if mouse button is pressed down - -### Fixed - - Open port checking error reporting (Thanks to imachug) - - Out-of-range big file requests - - Don't output 
errors happened on gevent greenlets twice - - Newsfeed skip sites with no database - - Newsfeed queries with multiple params - - Newsfeed queries with UNION and UNION ALL - - Fix site clone with sites larger that 10MB - - Unreliable Websocket connection when requesting files from different sites at the same time - - -## ZeroNet 0.6.0 (2017-10-17) - -### Added - - New plugin: Big file support - - Automatic pinning on Big file download - - Enable TCP_NODELAY for supporting sockets - - actionOptionalFileList API command arguments to list non-downloaded files or only big files - - serverShowdirectory API command arguments to allow to display site's directory in OS file browser - - fileNeed API command to initialize optional file downloading - - wrapperGetAjaxKey API command to request nonce for AJAX request - - Json.gz support for database files - - P2P port checking (Thanks for grez911) - - `--download_optional auto` argument to enable automatic optional file downloading for newly added site - - Statistics for big files and protocol command requests on /Stats - - Allow to set user limitation based on auth_address - -### Changed - - More aggressive and frequent connection timeout checking - - Use out of msgpack context file streaming for files larger than 512KB - - Allow optional files workers over the worker limit - - Automatic redirection to wrapper on nonce_error - - Send websocket event on optional file deletion - - Optimize sites.json saving - - Enable faster C-based msgpack packer by default - - Major optimization on Bootstrapper plugin SQL queries - - Don't reset bad file counter on restart, to allow easier give up on unreachable files - - Incoming connection limit changed from 1000 to 500 to avoid reaching socket limit on Windows - - Changed tracker boot.zeronet.io domain, because zeronet.io got banned in some countries - -#### Fixed - - Sub-directories in user directories - -## ZeroNet 0.5.7 (2017-07-19) -### Added - - New plugin: CORS to request read 
permission to other site's content - - New API command: userSetSettings/userGetSettings to store site's settings in users.json - - Avoid file download if the file size does not match with the requested one - - JavaScript and wrapper less file access using /raw/ prefix ([Example](http://127.0.0.1:43110/raw/1AsRLpuRxr3pb9p3TKoMXPSWHzh6i7fMGi/en.tar.gz/index.html)) - - --silent command line option to disable logging to stdout - - -### Changed - - Better error reporting on sign/verification errors - - More test for sign and verification process - - Update to OpenSSL v1.0.2l - - Limit compressed files to 6MB to avoid zip/tar.gz bomb - - Allow space, [], () characters in filenames - - Disable cross-site resource loading to improve privacy. [Reported by Beardog108] - - Download directly accessed Pdf/Svg/Swf files instead of displaying them to avoid wrapper escape using in JS in SVG file. [Reported by Beardog108] - - Disallow potentially unsafe regular expressions to avoid ReDoS [Reported by MuxZeroNet] - -### Fixed - - Detecting data directory when running Windows distribution exe [Reported by Plasmmer] - - OpenSSL loading under Android 6+ - - Error on exiting when no connection server started - - -## ZeroNet 0.5.6 (2017-06-15) -### Added - - Callback for certSelect API command - - More compact list formatting in json - -### Changed - - Remove obsolete auth_key_sha512 and signature format - - Improved Spanish translation (Thanks to Pupiloho) - -### Fixed - - Opened port checking (Thanks l5h5t7 & saber28 for reporting) - - Standalone update.py argument parsing (Thanks Zalex for reporting) - - uPnP crash on startup (Thanks Vertux for reporting) - - CoffeeScript 1.12.6 compatibility (Thanks kavamaken & imachug) - - Multi value argument parsing - - Database error when running from directory that contains special characters (Thanks Pupiloho for reporting) - - Site lock violation logging - - -#### Proxy bypass during source upgrade [Reported by ZeroMux] - -In ZeroNet before 
0.5.6 during the client's built-in source code upgrade mechanism, -ZeroNet did not respect Tor and/or proxy settings. - -Result: ZeroNet downloaded the update without using the Tor network and potentially leaked the connections. - -Fix: Removed the problematic code line from the updater that removed the proxy settings from the socket library. - -Affected versions: ZeroNet 0.5.5 and earlier, Fixed in: ZeroNet 0.5.6 - - -#### XSS vulnerability using DNS rebinding. [Reported by Beardog108] - -In ZeroNet before 0.5.6 the web interface did not validate the request's Host parameter. - -Result: An attacker using a specially crafted DNS entry could have bypassed the browser's cross-site-scripting protection -and potentially gained access to user's private data stored on site. - -Fix: By default ZeroNet only accept connections from 127.0.0.1 and localhost hosts. -If you bind the ui server to an external interface, then it also adds the first http request's host to the allowed host list -or you can define it manually using --ui_host. - -Affected versions: ZeroNet 0.5.5 and earlier, Fixed in: ZeroNet 0.5.6 - - -## ZeroNet 0.5.5 (2017-05-18) -### Added -- Outgoing socket binding by --bind parameter -- Database rebuilding progress bar -- Protect low traffic site's peers from cleanup closing -- Local site blacklisting -- Cloned site source code upgrade from parent -- Input placeholder support for displayPrompt -- Alternative interaction for wrapperConfirm - -### Changed -- New file priorities for faster site display on first visit -- Don't add ? 
to url if push/replaceState url starts with # - -### Fixed -- PermissionAdd/Remove admin command requirement -- Multi-line confirmation dialog - - -## ZeroNet 0.5.4 (2017-04-14) -### Added -- Major speed and CPU usage enhancements in Tor always mode -- Send skipped modifications to outdated clients - -### Changed -- Upgrade libs to latest version -- Faster port opening and closing -- Deny site limit modification in MultiUser mode - -### Fixed -- Filling database from optional files -- OpenSSL detection on systems with OpenSSL 1.1 -- Users.json corruption on systems with slow hdd -- Fix leaking files in data directory by webui - - -## ZeroNet 0.5.3 (2017-02-27) -### Added -- Tar.gz/zip packed site support -- Utf8 filenames in archive files -- Experimental --db_mode secure database mode to prevent data loss on systems with unreliable power source. -- Admin user support in MultiUser mode -- Optional deny adding new sites in MultiUser mode - -### Changed -- Faster update and publish times by new socket sharing algorithm - -### Fixed -- Fix missing json_row errors when using Mute plugin - - -## ZeroNet 0.5.2 (2017-02-09) -### Added -- User muting -- Win/Mac signed exe/.app -- Signed commits - -### Changed -- Faster site updates after startup -- New macOS package for 10.10 compatibility - -### Fixed -- Fix "New version just released" popup on page first visit -- Fix disappearing optional files bug (Thanks l5h5t7 for reporting) -- Fix skipped updates on unreliable connections (Thanks P2P for reporting) -- Sandbox escape security fix (Thanks Firebox for reporting) -- Fix error reporting on async websocket functions - - -## ZeroNet 0.5.1 (2016-11-18) -### Added -- Multi language interface -- New plugin: Translation helper for site html and js files -- Per-site favicon - -### Fixed -- Parallel optional file downloading - - -## ZeroNet 0.5.0 (2016-11-08) -### Added -- New Plugin: Allow list/delete/pin/manage files on ZeroHello -- New API commands to follow user's optional 
files, and query stats for optional files -- Set total size limit on optional files. -- New Plugin: Save peers to database and keep them between restarts to allow more faster optional file search and make it work without trackers -- Rewritten uPnP port opener + close port on exit (Thanks to sirMackk!) -- Lower memory usage by lazy PeerHashfield creation -- Loaded json files statistics and database info at /Stats page - -### Changed -- Separate lock file for better Windows compatibility -- When executing start.py open browser even if ZeroNet is already running -- Keep plugin order after reload to allow plugins to extends an another plug-in -- Only save sites.json if fully loaded to avoid data loss -- Change aletorrenty tracker to a more reliable one -- Much lower findhashid CPU usage -- Pooled downloading of large amount of optional files -- Lots of other optional file changes to make it better -- If we have 1000 peers for a site make cleanup more aggressive -- Use warning instead of error on verification errors -- Push updates to newer clients first -- Bad file reset improvements - -### Fixed -- Fix site deletion errors on startup -- Delay websocket messages until it's connected -- Fix database import if data file contains extra data -- Fix big site download -- Fix diff sending bug (been chasing it for a long time) -- Fix random publish errors when json file contained [] characters -- Fix site delete and siteCreate bug -- Fix file write confirmation dialog - - -## ZeroNet 0.4.1 (2016-09-05) -### Added -- Major core changes to allow fast startup and lower memory usage -- Try to reconnect to Tor on lost connection -- Sidebar fade-in -- Try to avoid incomplete data files overwrite -- Faster database open -- Display user file sizes in sidebar -- Concurrent worker number depends on --connection_limit - -### Changed -- Close databases after 5 min idle time -- Better site size calculation -- Allow "-" character in domains -- Always try to keep connections for sites -- 
Remove merger permission from merged sites -- Newsfeed scans only last 3 days to speed up database queries -- Updated ZeroBundle-win to Python 2.7.12 - -### Fixed -- Fix for important security problem, which is allowed anyone to publish new content without valid certificate from ID provider. Thanks Kaffie for pointing it out! -- Fix sidebar error when no certificate provider selected -- Skip invalid files on database rebuilding -- Fix random websocket connection error popups -- Fix new siteCreate command -- Fix site size calculation -- Fix port open checking after computer wake up -- Fix --size_limit parsing from command line - - -## ZeroNet 0.4.0 (2016-08-11) -### Added -- Merger site plugin -- Live source code reloading: Faster core development by allowing me to make changes in ZeroNet source code without restarting it. -- New json table format for merger sites -- Database rebuild from sidebar. -- Allow to store custom data directly in json table: Much simpler and faster SQL queries. -- User file archiving: Allows the site owner to archive inactive user's content into single file. (Reducing initial sync time/cpu/memory usage) -- Also trigger onUpdated/update database on file delete. -- Permission request from ZeroFrame API. -- Allow to store extra data in content.json using fileWrite API command. 
-- Faster optional files downloading -- Use alternative sources (Gogs, Gitlab) to download updates -- Track provided sites/connection and prefer to keep the ones with more sites to reduce connection number - -### Changed -- Keep at least 5 connection per site -- Changed target connection for sites to 10 from 15 -- ZeroHello search function stability/speed improvements -- Improvements for clients with slower HDD - -### Fixed -- Fix IE11 wrapper nonce errors -- Fix sidebar on mobile devices -- Fix site size calculation -- Fix IE10 compatibility -- Windows XP ZeroBundle compatibility (THX to people of China) - - -## ZeroNet 0.3.7 (2016-05-27) -### Changed -- Patch command to reduce bandwidth usage by transfer only the changed lines -- Other cpu/memory optimizations - - -## ZeroNet 0.3.6 (2016-05-27) -### Added -- New ZeroHello -- Newsfeed function - -### Fixed -- Security fixes - - -## ZeroNet 0.3.5 (2016-02-02) -### Added -- Full Tor support with .onion hidden services -- Bootstrap using ZeroNet protocol - -### Fixed -- Fix Gevent 1.0.2 compatibility - - -## ZeroNet 0.3.4 (2015-12-28) -### Added -- AES, ECIES API function support -- PushState and ReplaceState url manipulation support in API -- Multiuser localstorage diff --git a/Dockerfile b/Dockerfile index 3f1d3c18..aa7ca724 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,33 +1,28 @@ -FROM alpine:3.15 +FROM ubuntu:14.04 + +MAINTAINER Felix Imobersteg #Base settings +ENV DEBIAN_FRONTEND noninteractive ENV HOME /root -COPY requirements.txt /root/requirements.txt +#Update package lists +RUN apt-get update -y -#Install ZeroNet -RUN apk --update --no-cache --no-progress add python3 python3-dev py3-pip gcc g++ autoconf automake libtool libffi-dev musl-dev make tor openssl \ - && pip3 install -r /root/requirements.txt \ - && apk del python3-dev gcc g++ autoconf automake libtool libffi-dev musl-dev make \ - && echo "ControlPort 9051" >> /etc/tor/torrc \ - && echo "CookieAuthentication 1" >> /etc/tor/torrc - -RUN python3 -V 
\ - && python3 -m pip list \ - && tor --version \ - && openssl version +#Install ZeroNet deps +RUN apt-get install msgpack-python python-gevent python-pip python-dev -y +RUN pip install msgpack-python --upgrade #Add Zeronet source -COPY . /root -VOLUME /root/data +ADD . /root -#Control if Tor proxy is started -ENV ENABLE_TOR true - -WORKDIR /root +#Slimming down Docker containers +RUN apt-get clean -y +RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* #Set upstart command -CMD (! ${ENABLE_TOR} || tor&) && python3 zeronet.py --ui_ip 0.0.0.0 --fileserver_port 26117 +CMD cd /root && python zeronet.py --ui_ip 0.0.0.0 #Expose ports -EXPOSE 43110 26117 +EXPOSE 43110 +EXPOSE 15441 \ No newline at end of file diff --git a/Dockerfile.arm64v8 b/Dockerfile.arm64v8 deleted file mode 100644 index d27b7620..00000000 --- a/Dockerfile.arm64v8 +++ /dev/null @@ -1,34 +0,0 @@ -FROM alpine:3.12 - -#Base settings -ENV HOME /root - -COPY requirements.txt /root/requirements.txt - -#Install ZeroNet -RUN apk --update --no-cache --no-progress add python3 python3-dev gcc libffi-dev musl-dev make tor openssl \ - && pip3 install -r /root/requirements.txt \ - && apk del python3-dev gcc libffi-dev musl-dev make \ - && echo "ControlPort 9051" >> /etc/tor/torrc \ - && echo "CookieAuthentication 1" >> /etc/tor/torrc - -RUN python3 -V \ - && python3 -m pip list \ - && tor --version \ - && openssl version - -#Add Zeronet source -COPY . /root -VOLUME /root/data - -#Control if Tor proxy is started -ENV ENABLE_TOR false - -WORKDIR /root - -#Set upstart command -CMD (! ${ENABLE_TOR} || tor&) && python3 zeronet.py --ui_ip 0.0.0.0 --fileserver_port 26552 - -#Expose ports -EXPOSE 43110 26552 - diff --git a/LICENSE b/LICENSE index 0d17b72d..d6a93266 100644 --- a/LICENSE +++ b/LICENSE @@ -1,27 +1,340 @@ -This program is free software: you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by -the Free Software Foundation, version 3. 
- -This program is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details. - -You should have received a copy of the GNU General Public License -along with this program. If not, see . - - -Additional Conditions : - -Contributing to this repo - This repo is governed by GPLv3, same is located at the root of the ZeroNet git repo, - unless specified separately all code is governed by that license, contributions to this repo - are divided into two key types, key contributions and non-key contributions, key contributions - are which, directly affects the code performance, quality and features of software, - non key contributions include things like translation datasets, image, graphic or video - contributions that does not affect the main usability of software but improves the existing - usability of certain thing or feature, these also include tests written with code, since their - purpose is to check, whether something is working or not as intended. All the non-key contributions - are governed by [CC BY-SA 4.0](https://creativecommons.org/licenses/by-sa/4.0/), unless specified - above, a contribution is ruled by the type of contribution if there is a conflict between two - contributing parties of repo in any case. +GNU GENERAL PUBLIC LICENSE + Version 2, June 1991 + + Copyright (C) 1989, 1991 Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +License is intended to guarantee your freedom to share and change free +software--to make sure the software is free for all its users. 
This +General Public License applies to most of the Free Software +Foundation's software and to any other program whose authors commit to +using it. (Some other Free Software Foundation software is covered by +the GNU Lesser General Public License instead.) You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +this service if you wish), that you receive source code or can get it +if you want it, that you can change the software or use pieces of it +in new free programs; and that you know you can do these things. + + To protect your rights, we need to make restrictions that forbid +anyone to deny you these rights or to ask you to surrender the rights. +These restrictions translate to certain responsibilities for you if you +distribute copies of the software, or if you modify it. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must give the recipients all the rights that +you have. You must make sure that they, too, receive or can get the +source code. And you must show them these terms so they know their +rights. + + We protect your rights with two steps: (1) copyright the software, and +(2) offer you this license which gives you legal permission to copy, +distribute and/or modify the software. + + Also, for each author's protection and ours, we want to make certain +that everyone understands that there is no warranty for this free +software. If the software is modified by someone else and passed on, we +want its recipients to know that what they have is not the original, so +that any problems introduced by others will not reflect on the original +authors' reputations. + + Finally, any free program is threatened constantly by software +patents. 
We wish to avoid the danger that redistributors of a free +program will individually obtain patent licenses, in effect making the +program proprietary. To prevent this, we have made it clear that any +patent must be licensed for everyone's free use or not licensed at all. + + The precise terms and conditions for copying, distribution and +modification follow. + + GNU GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains +a notice placed by the copyright holder saying it may be distributed +under the terms of this General Public License. The "Program", below, +refers to any such program or work, and a "work based on the Program" +means either the Program or any derivative work under copyright law: +that is to say, a work containing the Program or a portion of it, +either verbatim or with modifications and/or translated into another +language. (Hereinafter, translation is included without limitation in +the term "modification".) Each licensee is addressed as "you". + +Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running the Program is not restricted, and the output from the Program +is covered only if its contents constitute a work based on the +Program (independent of having been made by running the Program). +Whether that is true depends on what the Program does. + + 1. You may copy and distribute verbatim copies of the Program's +source code as you receive it, in any medium, provided that you +conspicuously and appropriately publish on each copy an appropriate +copyright notice and disclaimer of warranty; keep intact all the +notices that refer to this License and to the absence of any warranty; +and give any other recipients of the Program a copy of this License +along with the Program. 
+ +You may charge a fee for the physical act of transferring a copy, and +you may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Program or any portion +of it, thus forming a work based on the Program, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices + stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in + whole or in part contains or is derived from the Program or any + part thereof, to be licensed as a whole at no charge to all third + parties under the terms of this License. + + c) If the modified program normally reads commands interactively + when run, you must cause it, when started running for such + interactive use in the most ordinary way, to print or display an + announcement including an appropriate copyright notice and a + notice that there is no warranty (or else, saying that you provide + a warranty) and that users may redistribute the program under + these conditions, and telling the user how to view a copy of this + License. (Exception: if the Program itself is interactive but + does not normally print such an announcement, your work based on + the Program is not required to print an announcement.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Program, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. 
But when you +distribute the same sections as part of a whole which is a work based +on the Program, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Program. + +In addition, mere aggregation of another work not based on the Program +with the Program (or with a work based on the Program) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may copy and distribute the Program (or a work based on it, +under Section 2) in object code or executable form under the terms of +Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable + source code, which must be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three + years, to give any third party, for a charge no more than your + cost of physically performing source distribution, a complete + machine-readable copy of the corresponding source code, to be + distributed under the terms of Sections 1 and 2 above on a medium + customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer + to distribute corresponding source code. (This alternative is + allowed only for noncommercial distribution and only if you + received the program in object code or executable form with such + an offer, in accord with Subsection b above.) 
+ +The source code for a work means the preferred form of the work for +making modifications to it. For an executable work, complete source +code means all the source code for all modules it contains, plus any +associated interface definition files, plus the scripts used to +control compilation and installation of the executable. However, as a +special exception, the source code distributed need not include +anything that is normally distributed (in either source or binary +form) with the major components (compiler, kernel, and so on) of the +operating system on which the executable runs, unless that component +itself accompanies the executable. + +If distribution of executable or object code is made by offering +access to copy from a designated place, then offering equivalent +access to copy the source code from the same place counts as +distribution of the source code, even though third parties are not +compelled to copy the source along with the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program +except as expressly provided under this License. Any attempt +otherwise to copy, modify, sublicense or distribute the Program is +void, and will automatically terminate your rights under this License. +However, parties who have received copies, or rights, from you under +this License will not have their licenses terminated so long as such +parties remain in full compliance. + + 5. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Program or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Program (or any work based on the +Program), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Program or works based on it. + + 6. 
Each time you redistribute the Program (or any work based on the +Program), the recipient automatically receives a license from the +original licensor to copy, distribute or modify the Program subject to +these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties to +this License. + + 7. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Program at all. For example, if a patent +license would not permit royalty-free redistribution of the Program by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Program. + +If any portion of this section is held invalid or unenforceable under +any particular circumstance, the balance of the section is intended to +apply and the section as a whole is intended to apply in other +circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system, which is +implemented by public license practices. 
Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 8. If the distribution and/or use of the Program is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Program under this License +may add an explicit geographical distribution limitation excluding +those countries, so that distribution is permitted only in or among +countries not thus excluded. In such case, this License incorporates +the limitation as if written in the body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions +of the General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + +Each version is given a distinguishing version number. If the Program +specifies a version number of this License which applies to it and "any +later version", you have the option of following the terms and conditions +either of that version or of any later version published by the Free +Software Foundation. If the Program does not specify a version number of +this License, you may choose any version ever published by the Free Software +Foundation. + + 10. If you wish to incorporate parts of the Program into other free +programs whose distribution conditions are different, write to the author +to ask for permission. For software which is copyrighted by the Free +Software Foundation, write to the Free Software Foundation; we sometimes +make exceptions for this. 
Our decision will be guided by the two goals +of preserving the free status of all derivatives of our free software and +of promoting the sharing and reuse of software generally. + + NO WARRANTY + + 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY +FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN +OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES +PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED +OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS +TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE +PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, +REPAIR OR CORRECTION. + + 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR +REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, +INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING +OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED +TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY +YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER +PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE +POSSIBILITY OF SUCH DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. 
It is safest +to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + {description} + Copyright (C) {year} {fullname} + + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 2 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License along + with this program; if not, write to the Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +Also add information on how to contact you by electronic and paper mail. + +If the program is interactive, make it output a short notice like this +when it starts in an interactive mode: + + Gnomovision version 69, Copyright (C) year name of author + Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, the commands you use may +be called something other than `show w' and `show c'; they could even be +mouse-clicks or menu items--whatever suits your program. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the program, if +necessary. 
Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the program + `Gnomovision' (which makes passes at compilers) written by James Hacker. + + {signature of Ty Coon}, 1 April 1989 + Ty Coon, President of Vice + +This General Public License does not permit incorporating your program into +proprietary programs. If your program is a subroutine library, you may +consider it more useful to permit linking proprietary applications with the +library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. + diff --git a/README-ru.md b/README-ru.md deleted file mode 100644 index 7d557727..00000000 --- a/README-ru.md +++ /dev/null @@ -1,133 +0,0 @@ -# ZeroNet [![tests](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml/badge.svg)](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/) [![Docker Pulls](https://img.shields.io/docker/pulls/canewsin/zeronet)](https://hub.docker.com/r/canewsin/zeronet) - -[įŽ€äŊ“中文](./README-zh-cn.md) -[English](./README.md) - -ДĐĩ҆ĐĩĐŊŅ‚Ņ€Đ°ĐģиСОваĐŊĐŊŅ‹Đĩ вĐĩĐąŅĐ°ĐšŅ‚Ņ‹, Đ¸ŅĐŋĐžĐģŅŒĐˇŅƒŅŽŅ‰Đ¸Đĩ ĐēŅ€Đ¸ĐŋŅ‚ĐžĐŗŅ€Đ°Ņ„Đ¸ŅŽ Bitcoin и ĐŋŅ€ĐžŅ‚ĐžĐēĐžĐģ BitTorrent — https://zeronet.dev ([ЗĐĩŅ€ĐēаĐģĐž в ZeroNet](http://127.0.0.1:43110/1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX/)). В ĐžŅ‚ĐģĐ¸Ņ‡Đ¸Đ¸ ĐžŅ‚ Bitcoin, ZeroNet'҃ ĐŊĐĩ ҂ҀĐĩĐąŅƒĐĩŅ‚ŅŅ ĐąĐģĐžĐē҇ĐĩĐšĐŊ Đ´ĐģŅ Ņ€Đ°ĐąĐžŅ‚Ņ‹, ОдĐŊаĐēĐž ĐžĐŊ Đ¸ŅĐŋĐžĐģŅŒĐˇŅƒĐĩŅ‚ Ņ‚Ņƒ ĐļĐĩ ĐēŅ€Đ¸ĐŋŅ‚ĐžĐŗŅ€Đ°Ņ„Đ¸ŅŽ, Ņ‡Ņ‚ĐžĐąŅ‹ ОйĐĩҁĐŋĐĩŅ‡Đ¸Ņ‚ŅŒ ŅĐžŅ…Ņ€Đ°ĐŊĐŊĐžŅŅ‚ŅŒ и ĐŋŅ€ĐžĐ˛ĐĩŅ€Đē҃ даĐŊĐŊҋ҅. - -## Đ—Đ°Ņ‡ĐĩĐŧ? 
- -- ĐœŅ‹ вĐĩŅ€Đ¸Đŧ в ĐžŅ‚ĐēŅ€Ņ‹Ņ‚ŅƒŅŽ, ŅĐ˛ĐžĐąĐžĐ´ĐŊŅƒŅŽ, и ĐŊĐĩĐŋĐžĐ´Đ´Đ°ŅŽŅ‰ŅƒŅŽŅŅ ҆ĐĩĐŊĐˇŅƒŅ€Đĩ ҁĐĩŅ‚ŅŒ и ŅĐ˛ŅĐˇŅŒ. -- НĐĩŅ‚ ĐĩдиĐŊОК Ņ‚ĐžŅ‡Đēи ĐžŅ‚ĐēаСа: ĐĄĐ°ĐšŅ‚ ĐžŅŅ‚Đ°Ņ‘Ņ‚ŅŅ ĐžĐŊĐģаКĐŊ, ĐŋĐžĐēа ĐĩĐŗĐž ĐžĐąŅĐģ҃ĐļиваĐĩŅ‚ Ņ…ĐžŅ‚Ņ ĐąŅ‹ 1 ĐŋĐ¸Ņ€. -- НĐĩŅ‚ ĐˇĐ°Ņ‚Ņ€Đ°Ņ‚ ĐŊа Ņ…ĐžŅŅ‚Đ¸ĐŊĐŗ: ĐĄĐ°ĐšŅ‚Ņ‹ ĐžĐąŅĐģ҃ĐļĐ¸Đ˛Đ°ŅŽŅ‚ŅŅ ĐŋĐžŅĐĩŅ‚Đ¸Ņ‚ĐĩĐģŅĐŧи. -- НĐĩвОСĐŧĐžĐļĐŊĐž ĐžŅ‚ĐēĐģŅŽŅ‡Đ¸Ņ‚ŅŒ: ОĐŊ ĐŊĐ¸ĐŗĐ´Đĩ, ĐŋĐžŅ‚ĐžĐŧ҃ Ņ‡Ņ‚Đž ĐžĐŊ вĐĩСдĐĩ. -- ĐĄĐēĐžŅ€ĐžŅŅ‚ŅŒ и вОСĐŧĐžĐļĐŊĐžŅŅ‚ŅŒ Ņ€Đ°ĐąĐžŅ‚Đ°Ņ‚ŅŒ ĐąĐĩС ИĐŊŅ‚ĐĩŅ€ĐŊĐĩŅ‚Đ°: Đ’Ņ‹ ҁĐŧĐžĐļĐĩŅ‚Đĩ ĐŋĐžĐģŅƒŅ‡Đ¸Ņ‚ŅŒ Đ´ĐžŅŅ‚ŅƒĐŋ Đē ŅĐ°ĐšŅ‚Ņƒ, ĐŋĐžŅ‚ĐžĐŧ҃ Ņ‡Ņ‚Đž ĐĩĐŗĐž ĐēĐžĐŋĐ¸Ņ Ņ…Ņ€Đ°ĐŊĐ¸Ņ‚ŅŅ ĐŊа Đ˛Đ°ŅˆĐĩĐŧ ĐēĐžĐŧĐŋŅŒŅŽŅ‚ĐĩŅ€Đĩ и ҃ Đ˛Đ°ŅˆĐ¸Ņ… ĐŋĐ¸Ņ€ĐžĐ˛. - -## ĐžŅĐžĐąĐĩĐŊĐŊĐžŅŅ‚Đ¸ - -- ОбĐŊОвĐģĐĩĐŊиĐĩ ŅĐ°ĐšŅ‚ĐžĐ˛ в Ņ€ĐĩаĐģҌĐŊĐžĐŧ Đ˛Ņ€ĐĩĐŧĐĩĐŊи -- ПоддĐĩŅ€ĐļĐēа Đ´ĐžĐŧĐĩĐŊОв `.bit` ([Namecoin](https://www.namecoin.org)) -- ЛĐĩĐŗĐēĐ°Ņ ŅƒŅŅ‚Đ°ĐŊОвĐēа: ĐŋŅ€ĐžŅŅ‚Đž Ņ€Đ°ŅĐŋаĐēŅƒĐšŅ‚Đĩ и СаĐŋŅƒŅŅ‚Đ¸Ņ‚Đĩ -- КĐģĐžĐŊĐ¸Ņ€ĐžĐ˛Đ°ĐŊиĐĩ ŅĐ°ĐšŅ‚ĐžĐ˛ "в ОдиĐŊ ĐēĐģиĐē" -- БĐĩҁĐŋĐ°Ņ€ĐžĐģҌĐŊĐ°Ņ [BIP32](https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki) - Đ°Đ˛Ņ‚ĐžŅ€Đ¸ĐˇĐ°Ņ†Đ¸Ņ: Đ’Đ°ŅˆĐ° ŅƒŅ‡ĐĩŅ‚ĐŊĐ°Ņ СаĐŋĐ¸ŅŅŒ ĐˇĐ°Ņ‰Đ¸Ņ‰ĐĩĐŊа Ņ‚ĐžĐš ĐļĐĩ ĐēŅ€Đ¸ĐŋŅ‚ĐžĐŗŅ€Đ°Ņ„Đ¸ĐĩĐš, Ņ‡Ņ‚Đž и Đ˛Đ°Ņˆ Bitcoin-ĐēĐžŅˆĐĩĐģĐĩĐē -- Đ’ŅŅ‚Ņ€ĐžĐĩĐŊĐŊŅ‹Đš SQL-ҁĐĩŅ€Đ˛ĐĩŅ€ ҁ ŅĐ¸ĐŊŅ…Ņ€ĐžĐŊĐ¸ĐˇĐ°Ņ†Đ¸ĐĩĐš даĐŊĐŊҋ҅ P2P: ПозвоĐģŅĐĩŅ‚ ҃ĐŋŅ€ĐžŅŅ‚Đ¸Ņ‚ŅŒ Ņ€Đ°ĐˇŅ€Đ°ĐąĐžŅ‚Đē҃ ŅĐ°ĐšŅ‚Đ° и ҃ҁĐēĐžŅ€Đ¸Ņ‚ŅŒ ĐˇĐ°ĐŗŅ€ŅƒĐˇĐē҃ ŅŅ‚Ņ€Đ°ĐŊĐ¸Ņ†Ņ‹ -- АĐŊĐžĐŊиĐŧĐŊĐžŅŅ‚ŅŒ: ПоĐģĐŊĐ°Ņ ĐŋОддĐĩŅ€ĐļĐēа ҁĐĩŅ‚Đ¸ Tor, Đ¸ŅĐŋĐžĐģŅŒĐˇŅƒŅ ҁĐēҀҋ҂ҋĐĩ ҁĐģ҃ĐļĐąŅ‹ `.onion` вĐŧĐĩŅŅ‚Đž Đ°Đ´Ņ€ĐĩŅĐžĐ˛ IPv4 -- Đ—Đ°ŅˆĐ¸Ņ„Ņ€ĐžĐ˛Đ°ĐŊĐŊĐžĐĩ TLS ĐŋОдĐēĐģŅŽŅ‡ĐĩĐŊиĐĩ -- ĐĐ˛Ņ‚ĐžĐŧĐ°Ņ‚Đ¸Ņ‡ĐĩҁĐēĐžĐĩ ĐžŅ‚ĐēŅ€Ņ‹Ņ‚Đ¸Đĩ UPnP–ĐŋĐžŅ€Ņ‚Đ° -- ПĐģĐ°ĐŗĐ¸ĐŊ Đ´ĐģŅ ĐŋОддĐĩŅ€ĐļĐēи ĐŊĐĩҁĐēĐžĐģҌĐēĐ¸Ņ… ĐŋĐžĐģŅŒĐˇĐžĐ˛Đ°Ņ‚ĐĩĐģĐĩĐš (openproxy) -- Đ Đ°ĐąĐžŅ‚Đ° ҁ ĐģŅŽĐąŅ‹Đŧи ĐąŅ€Đ°ŅƒĐˇĐĩŅ€Đ°Đŧи и ĐžĐŋĐĩŅ€Đ°Ņ†Đ¸ĐžĐŊĐŊŅ‹Đŧи ŅĐ¸ŅŅ‚ĐĩĐŧаĐŧи - -## ĐĸĐĩĐēŅƒŅ‰Đ¸Đĩ ĐžĐŗŅ€Đ°ĐŊĐ¸Ņ‡ĐĩĐŊĐ¸Ņ - -- ФаКĐģĐžĐ˛Ņ‹Đĩ 
Ņ‚Ņ€Đ°ĐŊСаĐēŅ†Đ¸Đ¸ ĐŊĐĩ ҁĐļĐ°Ņ‚Ņ‹ -- НĐĩŅ‚ ĐŋŅ€Đ¸Đ˛Đ°Ņ‚ĐŊҋ҅ ŅĐ°ĐšŅ‚ĐžĐ˛ - -## КаĐē ŅŅ‚Đž Ņ€Đ°ĐąĐžŅ‚Đ°ĐĩŅ‚? - -- ĐŸĐžŅĐģĐĩ СаĐŋ҃ҁĐēа `zeronet.py` Đ˛Ņ‹ ҁĐŧĐžĐļĐĩŅ‚Đĩ ĐŋĐžŅĐĩŅ‰Đ°Ņ‚ŅŒ ŅĐ°ĐšŅ‚Ņ‹ в ZeroNet, Đ¸ŅĐŋĐžĐģŅŒĐˇŅƒŅ Đ°Đ´Ņ€Đĩҁ - `http://127.0.0.1:43110/{zeronet_Đ°Đ´Ņ€Đĩҁ}` - (НаĐŋŅ€Đ¸ĐŧĐĩŅ€: `http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d`). -- ĐšĐžĐŗĐ´Đ° Đ˛Ņ‹ ĐŋĐžŅĐĩŅ‰Đ°ĐĩŅ‚Đĩ ĐŊĐžĐ˛Ņ‹Đš ŅĐ°ĐšŅ‚ в ZeroNet, ĐžĐŊ ĐŋŅ‹Ņ‚Đ°ĐĩŅ‚ŅŅ ĐŊĐ°ĐšŅ‚Đ¸ ĐŋĐ¸Ņ€ĐžĐ˛ ҁ ĐŋĐžĐŧĐžŅ‰ŅŒŅŽ ĐŋŅ€ĐžŅ‚ĐžĐēĐžĐģа BitTorrent, - Ņ‡Ņ‚ĐžĐąŅ‹ ҁĐēĐ°Ņ‡Đ°Ņ‚ŅŒ ҃ ĐŊĐ¸Ņ… Ņ„Đ°ĐšĐģŅ‹ ŅĐ°ĐšŅ‚Đ° (HTML, CSS, JS и Ņ‚.Đ´.). -- ĐŸĐžŅĐģĐĩ ĐŋĐžŅĐĩ҉ĐĩĐŊĐ¸Ņ ŅĐ°ĐšŅ‚Đ° Đ˛Ņ‹ Ņ‚ĐžĐļĐĩ ŅŅ‚Đ°ĐŊĐžĐ˛Đ¸Ņ‚ĐĩҁҌ ĐĩĐŗĐž ĐŋĐ¸Ņ€ĐžĐŧ. -- КаĐļĐ´Ņ‹Đš ŅĐ°ĐšŅ‚ ŅĐžĐ´ĐĩŅ€ĐļĐ¸Ņ‚ Ņ„Đ°ĐšĐģ `content.json`, ĐēĐžŅ‚ĐžŅ€Ņ‹Đš ŅĐžĐ´ĐĩŅ€ĐļĐ¸Ņ‚ SHA512 Ņ…ĐĩŅˆĐ¸ Đ˛ŅĐĩŅ… ĐžŅŅ‚Đ°ĐģҌĐŊŅ‹Đĩ Ņ„Đ°ĐšĐģŅ‹ - и ĐŋОдĐŋĐ¸ŅŅŒ, ŅĐžĐˇĐ´Đ°ĐŊĐŊŅƒŅŽ ҁ ĐŋĐžĐŧĐžŅ‰ŅŒŅŽ СаĐēŅ€Ņ‹Ņ‚ĐžĐŗĐž ĐēĐģŅŽŅ‡Đ° ŅĐ°ĐšŅ‚Đ°. -- Đ•ŅĐģи вĐģадĐĩĐģĐĩ҆ ŅĐ°ĐšŅ‚Đ° (Ņ‚ĐžŅ‚, ĐēŅ‚Đž вĐģадĐĩĐĩŅ‚ СаĐēҀҋ҂ҋĐŧ ĐēĐģŅŽŅ‡ĐžĐŧ Đ´ĐģŅ Đ°Đ´Ņ€ĐĩŅĐ° ŅĐ°ĐšŅ‚Đ°) иСĐŧĐĩĐŊŅĐĩŅ‚ ŅĐ°ĐšŅ‚, ĐžĐŊ - ĐŋОдĐŋĐ¸ŅŅ‹Đ˛Đ°ĐĩŅ‚ ĐŊĐžĐ˛Ņ‹Đš `content.json` и ĐŋŅƒĐąĐģиĐē҃ĐĩŅ‚ ĐĩĐŗĐž Đ´ĐģŅ ĐŋĐ¸Ņ€ĐžĐ˛. ĐŸĐžŅĐģĐĩ ŅŅ‚ĐžĐŗĐž ĐŋĐ¸Ņ€Ņ‹ ĐŋŅ€ĐžĐ˛ĐĩŅ€ŅŅŽŅ‚ ҆ĐĩĐģĐžŅŅ‚ĐŊĐžŅŅ‚ŅŒ `content.json` - (Đ¸ŅĐŋĐžĐģŅŒĐˇŅƒŅ ĐŋОдĐŋĐ¸ŅŅŒ), ҁĐēĐ°Ņ‡Đ˛Đ°ŅŽŅ‚ иСĐŧĐĩĐŊŅ‘ĐŊĐŊŅ‹Đĩ Ņ„Đ°ĐšĐģŅ‹ и Ņ€Đ°ŅĐŋŅ€ĐžŅŅ‚Ņ€Đ°ĐŊŅŅŽŅ‚ ĐŊĐžĐ˛Ņ‹Đš ĐēĐžĐŊŅ‚ĐĩĐŊŅ‚ Đ´ĐģŅ Đ´Ņ€ŅƒĐŗĐ¸Ņ… ĐŋĐ¸Ņ€ĐžĐ˛. 
- -[ĐŸŅ€ĐĩСĐĩĐŊŅ‚Đ°Ņ†Đ¸Ņ Đž ĐēŅ€Đ¸ĐŋŅ‚ĐžĐŗŅ€Đ°Ņ„Đ¸Đ¸ ZeroNet, ОйĐŊОвĐģĐĩĐŊĐ¸ŅŅ… ŅĐ°ĐšŅ‚ĐžĐ˛, ĐŧĐŊĐžĐŗĐžĐŋĐžĐģŅŒĐˇĐžĐ˛Đ°Ņ‚ĐĩĐģҌҁĐēĐ¸Ņ… ŅĐ°ĐšŅ‚Đ°Ņ… Âģ](https://docs.google.com/presentation/d/1_2qK1IuOKJ51pgBvllZ9Yu7Au2l551t3XBgyTSvilew/pub?start=false&loop=false&delayms=3000) -[Đ§Đ°ŅŅ‚Đž СадаваĐĩĐŧŅ‹Đĩ вОĐŋŅ€ĐžŅŅ‹ Âģ](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) -[ДоĐē҃ĐŧĐĩĐŊŅ‚Đ°Ņ†Đ¸Ņ Ņ€Đ°ĐˇŅ€Đ°ĐąĐžŅ‚Ņ‡Đ¸Đēа ZeroNet Âģ](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/) - -## ĐĄĐēŅ€Đ¸ĐŊŅˆĐžŅ‚Ņ‹ - -![Screenshot](https://i.imgur.com/H60OAHY.png) -![ZeroTalk](https://zeronet.io/docs/img/zerotalk.png) -[БоĐģҌ҈Đĩ ҁĐēŅ€Đ¸ĐŊŅˆĐžŅ‚ĐžĐ˛ в Đ´ĐžĐē҃ĐŧĐĩĐŊŅ‚Đ°Ņ†Đ¸Đ¸ ZeroNet Âģ](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/using_zeronet/sample_sites/) - -## КаĐē ĐŋŅ€Đ¸ŅĐžĐĩдиĐŊĐ¸Ņ‚ŅŒŅŅ? - -### Windows - -- ĐĄĐēĐ°Ņ‡Đ°ĐšŅ‚Đĩ и Ņ€Đ°ŅĐŋаĐēŅƒĐšŅ‚Đĩ Đ°Ņ€Ņ…Đ¸Đ˛ [ZeroNet-win.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-win.zip) (26МБ) -- ЗаĐŋŅƒŅŅ‚Đ¸Ņ‚Đĩ `ZeroNet.exe` - -### macOS - -- ĐĄĐēĐ°Ņ‡Đ°ĐšŅ‚Đĩ и Ņ€Đ°ŅĐŋаĐēŅƒĐšŅ‚Đĩ Đ°Ņ€Ņ…Đ¸Đ˛ [ZeroNet-mac.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-mac.zip) (14МБ) -- ЗаĐŋŅƒŅŅ‚Đ¸Ņ‚Đĩ `ZeroNet.app` - -### Linux (64 ĐąĐ¸Ņ‚) - -- ĐĄĐēĐ°Ņ‡Đ°ĐšŅ‚Đĩ и Ņ€Đ°ŅĐŋаĐēŅƒĐšŅ‚Đĩ Đ°Ņ€Ņ…Đ¸Đ˛ [ZeroNet-linux.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-linux.zip) (14МБ) -- ЗаĐŋŅƒŅŅ‚Đ¸Ņ‚Đĩ `./ZeroNet.sh` - -> **Note** -> ЗаĐŋŅƒŅŅ‚Đ¸Ņ‚Đĩ Ņ‚Đ°ĐēиĐŧ ĐžĐąŅ€Đ°ĐˇĐžĐŧ: `./ZeroNet.sh --ui_ip '*' --ui_restrict Đ˛Đ°Ņˆ_ip_Đ°Đ´Ņ€Đĩҁ`, Ņ‡Ņ‚ĐžĐąŅ‹ Ņ€Đ°ĐˇŅ€ĐĩŅˆĐ¸Ņ‚ŅŒ ŅƒĐ´Đ°ĐģŅ‘ĐŊĐŊĐžĐĩ ĐŋОдĐēĐģŅŽŅ‡ĐĩĐŊиĐĩ Đē вĐĩб–иĐŊŅ‚ĐĩҀ҄ĐĩĐšŅŅƒ. 
- -### Docker - -ĐžŅ„Đ¸Ņ†Đ¸Đ°ĐģҌĐŊŅ‹Đš ĐžĐąŅ€Đ°Đˇ ĐŊĐ°Ņ…ĐžĐ´Đ¸Ņ‚ŅŅ СдĐĩҁҌ: https://hub.docker.com/r/canewsin/zeronet/ - -### Android (arm, arm64, x86) - -- ДĐģŅ Ņ€Đ°ĐąĐžŅ‚Ņ‹ ҂ҀĐĩĐąŅƒĐĩŅ‚ŅŅ Android ĐēаĐē ĐŧиĐŊиĐŧ҃Đŧ вĐĩŅ€ŅĐ¸Đ¸ 5.0 Lollipop -- [Download from Google Play](https://play.google.com/store/apps/details?id=in.canews.zeronetmobile) -- ĐĄĐēĐ°Ņ‡Đ°Ņ‚ŅŒ APK: https://github.com/canewsin/zeronet_mobile/releases - -### Android (arm, arm64, x86) ОбĐģĐĩĐŗŅ‡Ņ‘ĐŊĐŊŅ‹Đš ĐēĐģиĐĩĐŊŅ‚ Ņ‚ĐžĐģҌĐēĐž Đ´ĐģŅ ĐŋŅ€ĐžŅĐŧĐžŅ‚Ņ€Đ° (1МБ) - -- ДĐģŅ Ņ€Đ°ĐąĐžŅ‚Ņ‹ ҂ҀĐĩĐąŅƒĐĩŅ‚ŅŅ Android ĐēаĐē ĐŧиĐŊиĐŧ҃Đŧ вĐĩŅ€ŅĐ¸Đ¸ 4.1 Jelly Bean -- [Download from Google Play](https://play.google.com/store/apps/details?id=dev.zeronetx.app.lite) - -### ĐŖŅŅ‚Đ°ĐŊОвĐēа иС Đ¸ŅŅ…ĐžĐ´ĐŊĐžĐŗĐž ĐēОда - -```sh -wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-src.zip -unzip ZeroNet-src.zip -cd ZeroNet -sudo apt-get update -sudo apt-get install python3-pip -sudo python3 -m pip install -r requirements.txt -``` -- ЗаĐŋŅƒŅŅ‚Đ¸Ņ‚Đĩ `python3 zeronet.py` - -ĐžŅ‚ĐēŅ€ĐžĐšŅ‚Đĩ ĐŋŅ€Đ¸Đ˛ĐĩŅ‚ŅŅ‚Đ˛ĐĩĐŊĐŊŅƒŅŽ ŅŅ‚Ņ€Đ°ĐŊĐ¸Ņ†Ņƒ ZeroHello в Đ˛Đ°ŅˆĐĩĐŧ ĐąŅ€Đ°ŅƒĐˇĐĩŅ€Đĩ ĐŋĐž ҁҁҋĐģĐēĐĩ http://127.0.0.1:43110/ - -## КаĐē ĐŧĐŊĐĩ ŅĐžĐˇĐ´Đ°Ņ‚ŅŒ ŅĐ°ĐšŅ‚ в ZeroNet? - -- КĐģиĐēĐŊĐ¸Ņ‚Đĩ ĐŊа **⋮** > **"Create new, empty site"** в ĐŧĐĩĐŊŅŽ ĐŊа ŅĐ°ĐšŅ‚Đĩ [ZeroHello](http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d). -- Đ’Ņ‹ ĐąŅƒĐ´ĐĩŅ‚Đĩ **ĐŋĐĩŅ€ĐĩĐŊаĐŋŅ€Đ°Đ˛ĐģĐĩĐŊŅ‹** ĐŊа ŅĐžĐ˛ĐĩŅ€ŅˆĐĩĐŊĐŊĐž ĐŊĐžĐ˛Ņ‹Đš ŅĐ°ĐšŅ‚, ĐēĐžŅ‚ĐžŅ€Ņ‹Đš ĐŧĐžĐļĐĩŅ‚ ĐąŅ‹Ņ‚ŅŒ иСĐŧĐĩĐŊŅ‘ĐŊ Ņ‚ĐžĐģҌĐēĐž ваĐŧи! 
-- Đ’Ņ‹ ĐŧĐžĐļĐĩŅ‚Đĩ ĐŊĐ°ĐšŅ‚Đ¸ и иСĐŧĐĩĐŊĐ¸Ņ‚ŅŒ ĐēĐžĐŊŅ‚ĐĩĐŊŅ‚ Đ˛Đ°ŅˆĐĩĐŗĐž ŅĐ°ĐšŅ‚Đ° в ĐēĐ°Ņ‚Đ°ĐģĐžĐŗĐĩ **data/[Đ°Đ´Ņ€Đĩҁ_Đ˛Đ°ŅˆĐĩĐŗĐž_ŅĐ°ĐšŅ‚Đ°]** -- ĐŸĐžŅĐģĐĩ иСĐŧĐĩĐŊĐĩĐŊиК ĐžŅ‚ĐēŅ€ĐžĐšŅ‚Đĩ Đ˛Đ°Ņˆ ŅĐ°ĐšŅ‚, ĐŋĐĩŅ€ĐĩĐēĐģŅŽŅ‡Đ¸Ņ‚Đĩ вĐģĐĩвО ĐēĐŊĐžĐŋĐē҃ "0" в ĐŋŅ€Đ°Đ˛ĐžĐŧ вĐĩҀ҅ĐŊĐĩĐŧ ŅƒĐŗĐģ҃, ĐˇĐ°Ņ‚ĐĩĐŧ ĐŊаĐļĐŧĐ¸Ņ‚Đĩ ĐēĐŊĐžĐŋĐēи **sign** и **publish** вĐŊĐ¸ĐˇŅƒ - -ĐĄĐģĐĩĐ´ŅƒŅŽŅ‰Đ¸Đĩ ŅˆĐ°ĐŗĐ¸: [ДоĐē҃ĐŧĐĩĐŊŅ‚Đ°Ņ†Đ¸Ņ Ņ€Đ°ĐˇŅ€Đ°ĐąĐžŅ‚Ņ‡Đ¸Đēа ZeroNet](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/) - -## ПоддĐĩŅ€ĐļĐ¸Ņ‚Đĩ ĐŋŅ€ĐžĐĩĐēŅ‚ - -- Bitcoin: 1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX (Đ ĐĩĐēĐžĐŧĐĩĐŊĐ´ŅƒĐĩĐŧ) -- LiberaPay: https://liberapay.com/PramUkesh -- Paypal: https://paypal.me/PramUkesh -- Đ”Ņ€ŅƒĐŗĐ¸Đĩ ҁĐŋĐžŅĐžĐąŅ‹: [Donate](!https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/#help-to-keep-zeronet-development-alive) - -#### ĐĄĐŋĐ°ŅĐ¸ĐąĐž! - -- ЗдĐĩҁҌ Đ˛Ņ‹ ĐŧĐžĐļĐĩŅ‚Đĩ ĐŋĐžĐģŅƒŅ‡Đ¸Ņ‚ŅŒ йОĐģҌ҈Đĩ иĐŊŅ„ĐžŅ€ĐŧĐ°Ņ†Đ¸Đ¸, ĐŋĐžĐŧĐžŅ‰ŅŒ, ĐŋŅ€ĐžŅ‡Đ¸Ņ‚Đ°Ņ‚ŅŒ ҁĐŋĐ¸ŅĐžĐē иСĐŧĐĩĐŊĐĩĐŊиК и Đ¸ŅŅĐģĐĩĐ´ĐžĐ˛Đ°Ņ‚ŅŒ ZeroNet ŅĐ°ĐšŅ‚Ņ‹: https://www.reddit.com/r/zeronetx/ -- ĐžĐąŅ‰ĐĩĐŊиĐĩ ĐŋŅ€ĐžĐ¸ŅŅ…ĐžĐ´Đ¸Ņ‚ ĐŊа ĐēаĐŊаĐģĐĩ [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) иĐģи в [Gitter](https://gitter.im/canewsin/ZeroNet) -- Đ­ĐģĐĩĐēŅ‚Ņ€ĐžĐŊĐŊĐ°Ņ ĐŋĐžŅ‡Ņ‚Đ°: canews.in@gmail.com diff --git a/README-zh-cn.md b/README-zh-cn.md deleted file mode 100644 index 37095ff6..00000000 --- a/README-zh-cn.md +++ /dev/null @@ -1,132 +0,0 @@ -# ZeroNet [![tests](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml/badge.svg)](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) 
[![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/) [![Docker Pulls](https://img.shields.io/docker/pulls/canewsin/zeronet)](https://hub.docker.com/r/canewsin/zeronet) - -[English](./README.md) - -äŊŋᔍ Bitcoin 加密和 BitTorrent įŊ‘įģœįš„åŽģ中åŋƒåŒ–įŊ‘įģœ - https://zeronet.dev - - -## ä¸ēäģ€äšˆīŧŸ - -* 我äģŦᛏäŋĄåŧ€æ”žīŧŒč‡Ēį”ąīŧŒæ— åŽĄæŸĨįš„įŊ‘įģœå’Œé€ščޝ -* 不äŧšå—å•į‚šæ•…éšœåŊąå“īŧšåĒčĻæœ‰åœ¨įēŋįš„čŠ‚į‚šīŧŒįĢ™į‚šå°ąäŧšäŋæŒåœ¨įēŋ -* æ— æ‰˜įŽĄč´šį”¨īŧšįĢ™į‚šį”ąčŽŋé—Žč€…æ‰˜įŽĄ -* æ— æŗ•å…ŗé—­īŧšå› ä¸ēčŠ‚į‚šæ— å¤„ä¸åœ¨ -* åŋĢ速åšļ可įĻģįēŋčŋčĄŒīŧšåŗäŊŋæ˛Ąæœ‰äē’联įŊ‘čŋžæŽĨ䚟可äģĨäŊŋᔍ - - -## 功čƒŊ - * 厞æ—ļįĢ™į‚šæ›´æ–° - * 支持 Namecoin įš„ .bit 域名 - * åŽ‰čŖ…æ–šäžŋīŧšåĒéœ€č§ŖåŽ‹åšļčŋčĄŒ - * ä¸€é”Žå…‹éš†å­˜åœ¨įš„įĢ™į‚š - * æ— éœ€å¯†į ã€åŸēäēŽ [BIP32](https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki) - įš„čŽ¤č¯īŧšæ‚¨įš„č´ĻæˆˇčĸĢä¸Žæ¯”į‰šå¸é’ąåŒ…į›¸åŒįš„åŠ å¯†æ–šæŗ•äŋæŠ¤ - * 内åģē SQL æœåŠĄå™¨å’Œ P2P 数捎同æ­ĨīŧščŽŠåŧ€å‘æ›´įŽ€å•åšļ提升加čŊŊ速åēĻ - * åŒŋ名性īŧšåŽŒæ•´įš„ Tor įŊ‘į윿”¯æŒīŧŒæ”¯æŒé€ščŋ‡ .onion éšč—æœåŠĄį›¸äē’čŋžæŽĨč€Œä¸æ˜¯é€ščŋ‡ IPv4 地址čŋžæŽĨ - * TLS 加密čŋžæŽĨ - * č‡Ē动打åŧ€ uPnP įĢ¯åŖ - * å¤šį”¨æˆˇīŧˆopenproxyīŧ‰æ”¯æŒįš„æ’äģļ - * 适ᔍäēŽäģģäŊ•æĩč§ˆå™¨ / 操äŊœįŗģįģŸ - - -## åŽŸį† - -* 在čŋčĄŒ `zeronet.py` 后īŧŒæ‚¨å°†å¯äģĨ通čŋ‡ - `http://127.0.0.1:43110/{zeronet_address}`īŧˆäž‹åĻ‚īŧš - `http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d`īŧ‰čŽŋ问 zeronet ä¸­įš„įĢ™į‚š -* 在您æĩč§ˆ zeronet įĢ™į‚šæ—ļīŧŒåŽĸæˆˇį̝äŧšå°č¯•通čŋ‡ BitTorrent įŊ‘į윿Ĩå¯ģæ‰žå¯į”¨įš„čŠ‚į‚šīŧŒäģŽč€Œä¸‹čŊŊ需čĻįš„æ–‡äģļīŧˆhtmlīŧŒcssīŧŒjs...īŧ‰ -* 您将äŧšå‚¨å­˜æ¯ä¸€ä¸Ēæĩč§ˆčŋ‡įš„įĢ™į‚š -* 每ä¸ĒįĢ™į‚šéƒŊ包åĢ一ä¸Ē名ä¸ē `content.json` įš„æ–‡äģļīŧŒåŽƒå‚¨å­˜äē†å…ļäģ–æ‰€æœ‰æ–‡äģļįš„ sha512 æ•Ŗåˆ—å€ŧäģĨ及一ä¸Ē通čŋ‡įĢ™į‚šį§é’Ĩį”Ÿæˆįš„į­žå -* åĻ‚æžœįĢ™į‚šįš„æ‰€æœ‰č€…īŧˆæ‹Ĩ有įĢ™į‚šåœ°å€įš„į§é’Ĩīŧ‰äŋŽæ”šäē†įĢ™į‚šīŧŒåšļ且äģ– / åĨšį­žåä熿–°įš„ `content.json` 
į„ļåŽæŽ¨é€č‡ŗå…ļäģ–čŠ‚į‚šīŧŒ - é‚Ŗäšˆčŋ™äē›čŠ‚į‚šå°†äŧšåœ¨äŊŋį”¨į­žåénj蝁 `content.json` įš„įœŸåŽžæ€§åŽīŧŒä¸‹čŊŊäŋŽæ”šåŽįš„æ–‡äģļåšļå°†æ–°å†…åŽšæŽ¨é€č‡ŗåĻå¤–įš„čŠ‚į‚š - -#### [å…ŗäēŽ ZeroNet 加密īŧŒįĢ™į‚šæ›´æ–°īŧŒå¤šį”¨æˆˇįĢ™į‚šįš„åšģၝቇ Âģ](https://docs.google.com/presentation/d/1_2qK1IuOKJ51pgBvllZ9Yu7Au2l551t3XBgyTSvilew/pub?start=false&loop=false&delayms=3000) -#### [å¸¸č§é—Žéĸ˜ Âģ](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) - -#### [ZeroNet åŧ€å‘č€…æ–‡æĄŖ Âģ](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/) - - -## åąåš•æˆĒ回 - -![Screenshot](https://i.imgur.com/H60OAHY.png) -![ZeroTalk](https://zeronet.io/docs/img/zerotalk.png) - -#### [ZeroNet æ–‡æĄŖä¸­įš„æ›´å¤šåąåš•æˆĒ回 Âģ](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/using_zeronet/sample_sites/) - - -## åĻ‚äŊ•加å…Ĩ - -### Windows - - - 下čŊŊ [ZeroNet-win.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-win.zip) (26MB) - - 在äģģæ„äŊįŊŽč§ŖåŽ‹įŧŠ - - čŋčĄŒ `ZeroNet.exe` - -### macOS - - - 下čŊŊ [ZeroNet-mac.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-mac.zip) (14MB) - - 在äģģæ„äŊįŊŽč§ŖåŽ‹įŧŠ - - čŋčĄŒ `ZeroNet.app` - -### Linux (x86-64bit) - - - `wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-linux.zip` - - `unzip ZeroNet-linux.zip` - - `cd ZeroNet-linux` - - äŊŋᔍäģĨ下å‘Ŋäģ¤å¯åЍ `./ZeroNet.sh` - - 在æĩč§ˆå™¨æ‰“åŧ€ http://127.0.0.1:43110/ åŗå¯čŽŋ问 ZeroHello éĄĩéĸ - - __提į¤ēīŧš__ č‹ĨčĻå…čŽ¸åœ¨ Web į•Œéĸä¸Šįš„čŋœį¨‹čŋžæŽĨīŧŒäŊŋᔍäģĨ下å‘Ŋäģ¤å¯åЍ `./ZeroNet.sh --ui_ip '*' --ui_restrict your.ip.address` - -### äģŽæēäģŖį åމ誅 - - - `wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-src.zip` - - `unzip ZeroNet-src.zip` - - `cd ZeroNet` - - `sudo apt-get update` - - `sudo apt-get install python3-pip` - - `sudo python3 -m pip install -r requirements.txt` - - äŊŋᔍäģĨ下å‘Ŋäģ¤å¯åЍ `python3 zeronet.py` - - 在æĩč§ˆå™¨æ‰“åŧ€ 
http://127.0.0.1:43110/ åŗå¯čŽŋ问 ZeroHello éĄĩéĸ - - ### Android (arm, arm64, x86) - - minimum Android version supported 21 (Android 5.0 Lollipop) - - [Download from Google Play](https://play.google.com/store/apps/details?id=in.canews.zeronetmobile) - - APK download: https://github.com/canewsin/zeronet_mobile/releases - -### Android (arm, arm64, x86) Thin Client for Preview Only (Size 1MB) - - minimum Android version supported 16 (JellyBean) - - [Download from Google Play](https://play.google.com/store/apps/details?id=dev.zeronetx.app.lite) - -## įŽ°æœ‰é™åˆļ - -* äŧ čž“æ–‡äģᅲ￞Ąæœ‰åŽ‹įŧŠ -* ä¸æ”¯æŒį§æœ‰įĢ™į‚š - - -## åĻ‚äŊ•创åģē一ä¸Ē ZeroNet įĢ™į‚šīŧŸ - - * į‚šå‡ģ [ZeroHello](http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d) įĢ™į‚šįš„ **⋮** > **「新åģēįŠēįĢ™į‚šã€** čœå•éĄš - * 您将čĸĢ**重厚向**到一ä¸Ēå…¨æ–°įš„įĢ™į‚šīŧŒč¯ĨįĢ™į‚šåĒčƒŊį”ąæ‚¨äŋŽæ”š - * 您可äģĨ在 **data/[æ‚¨įš„įĢ™į‚šåœ°å€]** į›ŽåŊ•中扞到åšļäŋŽæ”šįŊ‘įĢ™įš„å†…åŽš - * äŋŽæ”šåŽæ‰“åŧ€æ‚¨įš„įŊ‘įĢ™īŧŒå°†åŗä¸Šč§’įš„ã€Œ0」按钎拖到åˇĻäž§īŧŒį„ļåŽį‚šå‡ģåē•éƒ¨įš„**į­žå**åšļ**发布**按钎 - -æŽĨ下æĨįš„æ­ĨéǤīŧš[ZeroNet åŧ€å‘č€…æ–‡æĄŖ](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/) - -## 帎劊čŋ™ä¸ĒéĄšį›Ž -- Bitcoin: 1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX (Preferred) -- LiberaPay: https://liberapay.com/PramUkesh -- Paypal: https://paypal.me/PramUkesh -- Others: [Donate](!https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/#help-to-keep-zeronet-development-alive) - - -#### 感č°ĸ您īŧ - -* 更多äŋĄæ¯īŧŒå¸ŽåŠŠīŧŒå˜æ›´čްåŊ•å’Œ zeronet įĢ™į‚šīŧšhttps://www.reddit.com/r/zeronetx/ -* 前垀 [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) 或 [gitter](https://gitter.im/canewsin/ZeroNet) 和我äģŦčŠå¤Š -* [čŋ™é‡Œ](https://gitter.im/canewsin/ZeroNet)是一ä¸Ē gitter ä¸Šįš„ä¸­æ–‡čŠå¤ŠåŽ¤ -* Email: canews.in@gmail.com diff --git a/README.md b/README.md index 70b79adc..713a1779 100644 --- a/README.md +++ b/README.md @@ -1,16 +1,16 @@ -# ZeroNet 
[![tests](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml/badge.svg)](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/) [![Docker Pulls](https://img.shields.io/docker/pulls/canewsin/zeronet)](https://hub.docker.com/r/canewsin/zeronet) - -Decentralized websites using Bitcoin crypto and the BitTorrent network - https://zeronet.dev / [ZeroNet Site](http://127.0.0.1:43110/1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX/), Unlike Bitcoin, ZeroNet Doesn't need a blockchain to run, But uses cryptography used by BTC, to ensure data integrity and validation. +# ZeroNet [![Build Status](https://travis-ci.org/HelloZeroNet/ZeroNet.svg?branch=master)](https://travis-ci.org/HelloZeroNet/ZeroNet) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://zeronet.readthedocs.org/en/latest/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://zeronet.readthedocs.org/en/latest/help_zeronet/donate/) + +Decentralized websites using Bitcoin crypto and the BitTorrent network - http://zeronet.io ## Why? * We believe in open, free, and uncensored network and communication. -* No single point of failure: Site remains online so long as at least 1 peer is +* No single point of failure: Site remains online so long as at least 1 peer serving it. * No hosting costs: Sites are served by visitors. * Impossible to shut down: It's nowhere because it's everywhere. -* Fast and works offline: You can access the site even if Internet is +* Fast and works offline: You can access the site even if your internet is unavailable. 
@@ -20,11 +20,11 @@ Decentralized websites using Bitcoin crypto and the BitTorrent network - https:/ * Easy to setup: unpack & run * Clone websites in one click * Password-less [BIP32](https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki) - based authorization: Your account is protected by the same cryptography as your Bitcoin wallet + based authorization: Your account is protected by same cryptography as your Bitcoin wallet * Built-in SQL server with P2P data synchronization: Allows easier site development and faster page load times - * Anonymity: Full Tor network support with .onion hidden services instead of IPv4 addresses + * Tor network support * TLS encrypted connections - * Automatic uPnP port opening + * Automatic, uPnP port opening * Plugin for multiuser (openproxy) support * Works with any browser/OS @@ -33,124 +33,154 @@ Decentralized websites using Bitcoin crypto and the BitTorrent network - https:/ * After starting `zeronet.py` you will be able to visit zeronet sites using `http://127.0.0.1:43110/{zeronet_address}` (eg. - `http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d`). + `http://127.0.0.1:43110/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr`). * When you visit a new zeronet site, it tries to find peers using the BitTorrent network so it can download the site files (html, css, js...) from them. -* Each visited site is also served by you. -* Every site contains a `content.json` file which holds all other files in a sha512 hash - and a signature generated using the site's private key. +* Each visited site becomes also served by you. +* Every site contains a `content.json` which holds all other files in a sha512 hash + and a signature generated using site's private key. * If the site owner (who has the private key for the site address) modifies the - site and signs the new `content.json` and publishes it to the peers. 
- Afterwards, the peers verify the `content.json` integrity (using the + site, then he/she signs the new `content.json` and publishes it to the peers. + After the peers have verified the `content.json` integrity (using the signature), they download the modified files and publish the new content to other peers. #### [Slideshow about ZeroNet cryptography, site updates, multi-user sites Âģ](https://docs.google.com/presentation/d/1_2qK1IuOKJ51pgBvllZ9Yu7Au2l551t3XBgyTSvilew/pub?start=false&loop=false&delayms=3000) -#### [Frequently asked questions Âģ](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) +#### [Frequently asked questions Âģ](http://zeronet.readthedocs.org/en/latest/faq/) -#### [ZeroNet Developer Documentation Âģ](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/) ## Screenshots -![Screenshot](https://i.imgur.com/H60OAHY.png) -![ZeroTalk](https://zeronet.io/docs/img/zerotalk.png) +![Screenshot](http://zeronet.readthedocs.org/en/latest/img/zerohello.png) +![ZeroTalk](http://zeronet.readthedocs.org/en/latest/img/zerotalk.png) -#### [More screenshots in ZeroNet docs Âģ](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/using_zeronet/sample_sites/) +#### [More screenshots in ZeroNet docs Âģ](http://zeronet.readthedocs.org/en/latest/using_zeronet/sample_sites/) -## How to join +## How to join? 
### Windows - - Download [ZeroNet-win.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-win.zip) (26MB) - - Unpack anywhere - - Run `ZeroNet.exe` - -### macOS +* [Download ZeroBundle package](https://github.com/HelloZeroNet/ZeroBundle/releases/download/0.1.1/ZeroBundle-v0.1.1.zip) that includes Python 2.7.9 and all required libraries +* Unpack to any directory +* Run `zeronet.cmd` - - Download [ZeroNet-mac.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-mac.zip) (14MB) - - Unpack anywhere - - Run `ZeroNet.app` - -### Linux (x86-64bit) - - `wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-linux.zip` - - `unzip ZeroNet-linux.zip` - - `cd ZeroNet-linux` - - Start with: `./ZeroNet.sh` - - Open the ZeroHello landing page in your browser by navigating to: http://127.0.0.1:43110/ - - __Tip:__ Start with `./ZeroNet.sh --ui_ip '*' --ui_restrict your.ip.address` to allow remote connections on the web interface. - - ### Android (arm, arm64, x86) - - minimum Android version supported 21 (Android 5.0 Lollipop) - - [Download from Google Play](https://play.google.com/store/apps/details?id=in.canews.zeronetmobile) - - APK download: https://github.com/canewsin/zeronet_mobile/releases - -### Android (arm, arm64, x86) Thin Client for Preview Only (Size 1MB) - - minimum Android version supported 16 (JellyBean) - - [Download from Google Play](https://play.google.com/store/apps/details?id=dev.zeronetx.app.lite) +It downloads the latest version of ZeroNet then starts it automatically. -#### Docker -There is an official image, built from source at: https://hub.docker.com/r/canewsin/zeronet/ +#### Alternative method for Windows by installing Python -### Online Proxies -Proxies are like seed boxes for sites(i.e ZNX runs on a cloud vps), you can try zeronet experience from proxies. Add your proxy below if you have one. 
+* [Install Python 2.7](https://www.python.org/ftp/python/2.7.9/python-2.7.9.msi) +* [Install Python Greenlet](http://zeronet.io/files/windows/greenlet-0.4.5.win32-py2.7.exe) +* [Install Python Gevent](http://zeronet.io/files/windows/gevent-1.0.1.win32-py2.7.exe) +* [Install Python MsgPack](http://zeronet.io/files/windows/msgpack-python-0.4.2.win32-py2.7.exe) +* [Download and extract ZeroNet](https://codeload.github.com/HelloZeroNet/ZeroNet/zip/master) to any directory +* Run `start.py` -#### Official ZNX Proxy : +### Linux -https://proxy.zeronet.dev/ +#### Debian -https://zeronet.dev/ +* `sudo apt-get update` +* `sudo apt-get install msgpack-python python-gevent` +* `wget https://github.com/HelloZeroNet/ZeroNet/archive/master.tar.gz` +* `tar xvpfz master.tar.gz` +* `cd ZeroNet-master` +* Start with `python zeronet.py` +* Open http://127.0.0.1:43110/ in your browser and enjoy! :) -#### From Community +#### Other Linux or without root access +* Check your python version using `python --version` if the returned version is not `Python 2.7.X` then try `python2` or `python2.7` command and use it from now +* `wget https://bootstrap.pypa.io/get-pip.py` +* `python get-pip.py --user gevent msgpack-python` +* Start with `python zeronet.py` -https://0net-preview.com/ +### Mac -https://portal.ngnoid.tv/ + * Install [Homebrew](http://brew.sh/) + * `brew install python` + * `pip install gevent msgpack-python` + * [Download](https://github.com/HelloZeroNet/ZeroNet/archive/master.zip), Unpack, run `python zeronet.py` -https://zeronet.ipfsscan.io/ +### Vagrant +* `vagrant up` +* Access VM with `vagrant ssh` +* `cd /vagrant` +* Run `python zeronet.py --ui_ip 0.0.0.0` +* Open http://127.0.0.1:43110/ in your browser -### Install from source - - - `wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-src.zip` - - `unzip ZeroNet-src.zip` - - `cd ZeroNet` - - `sudo apt-get update` - - `sudo apt-get install python3-pip` - - `sudo python3 -m pip install -r 
requirements.txt` - - Start with: `python3 zeronet.py` - - Open the ZeroHello landing page in your browser by navigating to: http://127.0.0.1:43110/ +### Docker +* `docker run -p 15441:15441 -p 43110:43110 nofish/zeronet` +* Open http://127.0.0.1:43110/ in your browser ## Current limitations -* File transactions are not compressed +* No torrent-like, file splitting for big file support +* No more anonymous than Bittorrent +* File transactions are not compressed ~~or encrypted yet~~ * No private sites +* ~~You must have an open port to publish new changes~~ +* ~~Timeout problems on slow connections~~ ## How can I create a ZeroNet site? - * Click on **⋮** > **"Create new, empty site"** menu item on the site [ZeroHello](http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d). - * You will be **redirected** to a completely new site that is only modifiable by you! - * You can find and modify your site's content in **data/[yoursiteaddress]** directory - * After the modifications open your site, drag the topright "0" button to left, then press **sign** and **publish** buttons on the bottom +Shut down zeronet if you are running it already -Next steps: [ZeroNet Developer Documentation](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/) +```bash +$ zeronet.py siteCreate +... +- Site private key: 23DKQpzxhbVBrAtvLEc2uvk7DZweh4qL3fn3jpM3LgHDczMK2TtYUq +- Site address: 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 +... +- Site created! +$ zeronet.py +... +``` -## Help keep this project alive -- Bitcoin: 1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX (Preferred) -- LiberaPay: https://liberapay.com/PramUkesh -- Paypal: https://paypal.me/PramUkesh -- Others: [Donate](!https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/#help-to-keep-zeronet-development-alive) +Congratulations, you're finished! 
Now anyone can access your site using +`http://localhost:43110/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2` + +Next steps: [ZeroNet Developer Documentation](http://zeronet.readthedocs.org/en/latest/site_development/getting_started/) + + +## How can I modify a ZeroNet site? + +* Modify files located in data/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 directory. + After you're finished: + +```bash +$ zeronet.py siteSign 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 +- Signing site: 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2... +Private key (input hidden): +``` + +* Enter the private key you got when you created the site, then: + +```bash +$ zeronet.py sitePublish 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 +... +Site:13DNDk..bhC2 Publishing to 3/10 peers... +Site:13DNDk..bhC2 Successfuly published to 3 peers +- Serving files.... +``` + +* That's it! You've successfully signed and published your modifications. + + +## If you want to help keep this project alive + +- Bitcoin: 1QDhxQ6PraUZa21ET5fYUCPgdrwBomnFgX +- Paypal: https://zeronet.readthedocs.org/en/latest/help_zeronet/donate/ + +### Sponsors + +* Better OSX/Safari compatibility made possible by [BrowserStack.com](https://www.browserstack.com) #### Thank you! 
-* More info, help, changelog, zeronet sites: https://www.reddit.com/r/zeronetx/ -* Come, chat with us: [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) or on [gitter](https://gitter.im/canewsin/ZeroNet) -* Email: canews.in@gmail.com +* More info, help, changelog, zeronet sites: http://www.reddit.com/r/zeronet/ +* Come, chat with us: [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) or on [gitter](https://gitter.im/HelloZeroNet/ZeroNet) +* Email: hello@noloop.me diff --git a/Vagrantfile b/Vagrantfile index 24fe0c45..6c4da894 100644 --- a/Vagrantfile +++ b/Vagrantfile @@ -40,6 +40,6 @@ Vagrant.configure(VAGRANTFILE_API_VERSION) do |config| config.vm.provision "shell", inline: "sudo apt-get install msgpack-python python-gevent python-pip python-dev -y" config.vm.provision "shell", - inline: "sudo pip install msgpack --upgrade" + inline: "sudo pip install msgpack-python --upgrade" end diff --git a/plugins b/plugins deleted file mode 160000 index 689d9309..00000000 --- a/plugins +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 689d9309f73371f4681191b125ec3f2e14075eeb diff --git a/plugins/CryptMessage/CryptMessage.py b/plugins/CryptMessage/CryptMessage.py new file mode 100644 index 00000000..955dd9b1 --- /dev/null +++ b/plugins/CryptMessage/CryptMessage.py @@ -0,0 +1,53 @@ +from lib.pybitcointools import bitcoin as btctools +import hashlib + +ecc_cache = {} + + +def encrypt(data, pubkey, ephemcurve=None, ciphername='aes-256-cbc'): + from lib import pyelliptic + curve, pubkey_x, pubkey_y, i = pyelliptic.ECC._decode_pubkey(pubkey) + if ephemcurve is None: + ephemcurve = curve + ephem = pyelliptic.ECC(curve=ephemcurve) + key = hashlib.sha512(ephem.raw_get_ecdh_key(pubkey_x, pubkey_y)).digest() + key_e, key_m = key[:32], key[32:] + pubkey = ephem.get_pubkey() + iv = pyelliptic.OpenSSL.rand(pyelliptic.OpenSSL.get_cipher(ciphername).get_blocksize()) + ctx = pyelliptic.Cipher(key_e, iv, 1, ciphername) + ciphertext = iv + pubkey 
+ ctx.ciphering(data) + mac = pyelliptic.hmac_sha256(key_m, ciphertext) + return key_e, ciphertext + mac + + +def split(encrypted): + iv = encrypted[0:16] + ciphertext = encrypted[16+70:-32] + + return iv, ciphertext + + +def getEcc(privatekey=None): + from lib import pyelliptic + global eccs + if privatekey not in ecc_cache: + if privatekey: + publickey_bin = btctools.encode_pubkey(btctools.privtopub(privatekey), "bin") + publickey_openssl = toOpensslPublickey(publickey_bin) + privatekey_openssl = toOpensslPrivatekey(privatekey) + ecc_cache[privatekey] = pyelliptic.ECC(curve='secp256k1', privkey=privatekey_openssl, pubkey=publickey_openssl) + else: + ecc_cache[None] = pyelliptic.ECC() + return ecc_cache[privatekey] + + +def toOpensslPrivatekey(privatekey): + privatekey_bin = btctools.encode_privkey(privatekey, "bin") + return '\x02\xca\x00\x20' + privatekey_bin + + +def toOpensslPublickey(publickey): + publickey_bin = btctools.encode_pubkey(publickey, "bin") + publickey_bin = publickey_bin[1:] + publickey_openssl = '\x02\xca\x00 ' + publickey_bin[:32] + '\x00 ' + publickey_bin[32:] + return publickey_openssl diff --git a/plugins/CryptMessage/CryptMessagePlugin.py b/plugins/CryptMessage/CryptMessagePlugin.py new file mode 100644 index 00000000..0302c83a --- /dev/null +++ b/plugins/CryptMessage/CryptMessagePlugin.py @@ -0,0 +1,149 @@ +import base64 +import os + +from Plugin import PluginManager +from Crypt import CryptBitcoin +from lib.pybitcointools import bitcoin as btctools + +import CryptMessage + + +@PluginManager.registerTo("UiWebsocket") +class UiWebsocketPlugin(object): + def encrypt(self, text, publickey): + encrypted = CryptMessage.encrypt(text, CryptMessage.toOpensslPublickey(publickey)) + return encrypted + + def decrypt(self, encrypted, privatekey): + back = CryptMessage.getEcc(privatekey).decrypt(encrypted) + return back.decode("utf8") + + # - Actions - + + # Returns user's public key unique to site + # Return: Public key + def 
actionUserPublickey(self, to, index=0): + publickey = self.user.getEncryptPublickey(self.site.address, index) + self.response(to, publickey) + + # Encrypt a text using the publickey or user's sites unique publickey + # Return: Encrypted text using base64 encoding + def actionEciesEncrypt(self, to, text, publickey=0, return_aes_key=False): + if type(publickey) is int: # Encrypt using user's publickey + publickey = self.user.getEncryptPublickey(self.site.address, publickey) + aes_key, encrypted = self.encrypt(text.encode("utf8"), publickey.decode("base64")) + if return_aes_key: + self.response(to, [base64.b64encode(encrypted), base64.b64encode(aes_key)]) + else: + self.response(to, base64.b64encode(encrypted)) + + # Decrypt a text using privatekey or the user's site unique private key + # Return: Decrypted text or list of decrypted texts + def actionEciesDecrypt(self, to, param, privatekey=0): + if type(privatekey) is int: # Decrypt using user's privatekey + privatekey = self.user.getEncryptPrivatekey(self.site.address, privatekey) + + if type(param) == list: + encrypted_texts = param + else: + encrypted_texts = [param] + + texts = [] # Decoded texts + for encrypted_text in encrypted_texts: + try: + text = self.decrypt(encrypted_text.decode("base64"), privatekey) + texts.append(text) + except Exception, err: + texts.append(None) + + if type(param) == list: + self.response(to, texts) + else: + self.response(to, texts[0]) + + # Encrypt a text using AES + # Return: Iv, AES key, Encrypted text + def actionAesEncrypt(self, to, text, key=None, iv=None): + from lib import pyelliptic + + if key: + key = key.decode("base64") + else: + key = os.urandom(32) + + if iv: # Generate new AES key if not definied + iv = iv.decode("base64") + else: + iv = pyelliptic.Cipher.gen_IV('aes-256-cbc') + + if text: + encrypted = pyelliptic.Cipher(key, iv, 1, ciphername='aes-256-cbc').ciphering(text.encode("utf8")) + else: + encrypted = "" + + self.response(to, [base64.b64encode(key), 
base64.b64encode(iv), base64.b64encode(encrypted)]) + + # Decrypt a text using AES + # Return: Decrypted text + def actionAesDecrypt(self, to, *args): + from lib import pyelliptic + + if len(args) == 3: # Single decrypt + encrypted_texts = [(args[0], args[1])] + keys = [args[2]] + else: # Batch decrypt + encrypted_texts, keys = args + + texts = [] # Decoded texts + for iv, encrypted_text in encrypted_texts: + encrypted_text = encrypted_text.decode("base64") + iv = iv.decode("base64") + text = None + for key in keys: + ctx = pyelliptic.Cipher(key.decode("base64"), iv, 0, ciphername='aes-256-cbc') + try: + decrypted = ctx.ciphering(encrypted_text) + if decrypted and decrypted.decode("utf8"): # Valid text decoded + text = decrypted + except Exception, err: + pass + texts.append(text) + + if len(args) == 3: + self.response(to, texts[0]) + else: + self.response(to, texts) + + +@PluginManager.registerTo("User") +class UserPlugin(object): + def getEncryptPrivatekey(self, address, param_index=0): + assert param_index >= 0 and param_index <= 1000 + site_data = self.getSiteData(address) + + if site_data.get("cert"): # Different privatekey for different cert provider + index = param_index + self.getAddressAuthIndex(site_data["cert"]) + else: + index = param_index + + if "encrypt_privatekey_%s" % index not in site_data: + address_index = self.getAddressAuthIndex(address) + crypt_index = address_index + 1000 + index + site_data["encrypt_privatekey_%s" % index] = CryptBitcoin.hdPrivatekey(self.master_seed, crypt_index) + self.log.debug("New encrypt privatekey generated for %s:%s" % (address, index)) + return site_data["encrypt_privatekey_%s" % index] + + def getEncryptPublickey(self, address, param_index=0): + assert param_index >= 0 and param_index <= 1000 + site_data = self.getSiteData(address) + + if site_data.get("cert"): # Different privatekey for different cert provider + index = param_index + self.getAddressAuthIndex(site_data["cert"]) + else: + index = param_index + + if 
"encrypt_publickey_%s" % index not in site_data: + privatekey = self.getEncryptPrivatekey(address, param_index) + publickey = btctools.encode_pubkey(btctools.privtopub(privatekey), "bin_compressed") + site_data["encrypt_publickey_%s" % index] = base64.b64encode(publickey) + return site_data["encrypt_publickey_%s" % index] diff --git a/plugins/CryptMessage/Test/TestCrypt.py b/plugins/CryptMessage/Test/TestCrypt.py new file mode 100644 index 00000000..8e16cba2 --- /dev/null +++ b/plugins/CryptMessage/Test/TestCrypt.py @@ -0,0 +1,106 @@ +import pytest +from CryptMessage import CryptMessage + +@pytest.mark.usefixtures("resetSettings") +class TestCrypt: + def testPublickey(self, ui_websocket): + pub = ui_websocket.testAction("UserPublickey", 0) + assert len(pub) == 44 # Compressed, b64 encoded publickey + + # Different pubkey for specificed index + assert ui_websocket.testAction("UserPublickey", 1) != ui_websocket.testAction("UserPublickey", 0) + + # Same publickey for same index + assert ui_websocket.testAction("UserPublickey", 2) == ui_websocket.testAction("UserPublickey", 2) + + # Different publickey for different cert + pub1 = ui_websocket.testAction("UserPublickey", 0) + site_data = ui_websocket.user.getSiteData(ui_websocket.site.address) + site_data["cert"] = "zeroid.bit" + pub2 = ui_websocket.testAction("UserPublickey", 0) + assert pub1 != pub2 + + + + def testEcies(self, ui_websocket): + ui_websocket.actionUserPublickey(0, 0) + pub = ui_websocket.ws.result + + ui_websocket.actionEciesEncrypt(0, "hello", pub) + encrypted = ui_websocket.ws.result + assert len(encrypted) == 180 + + # Don't allow decrypt using other privatekey index + ui_websocket.actionEciesDecrypt(0, encrypted, 123) + decrypted = ui_websocket.ws.result + assert decrypted != "hello" + + # Decrypt using correct privatekey + ui_websocket.actionEciesDecrypt(0, encrypted) + decrypted = ui_websocket.ws.result + assert decrypted == "hello" + + # Decrypt batch + ui_websocket.actionEciesDecrypt(0, 
[encrypted, "baad", encrypted]) + decrypted = ui_websocket.ws.result + assert decrypted == ["hello", None, "hello"] + + + def testEciesUtf8(self, ui_websocket): + # Utf8 test + utf8_text = u'\xc1rv\xedzt\xfbr\xf5t\xfck\xf6rf\xfar\xf3g\xe9p' + ui_websocket.actionEciesEncrypt(0, utf8_text) + encrypted = ui_websocket.ws.result + + ui_websocket.actionEciesDecrypt(0, encrypted) + assert ui_websocket.ws.result == utf8_text + + + def testEciesAes(self, ui_websocket): + ui_websocket.actionEciesEncrypt(0, "hello", return_aes_key=True) + ecies_encrypted, aes_key = ui_websocket.ws.result + + # Decrypt using Ecies + ui_websocket.actionEciesDecrypt(0, ecies_encrypted) + assert ui_websocket.ws.result == "hello" + + # Decrypt using AES + aes_iv, aes_encrypted = CryptMessage.split(ecies_encrypted.decode("base64")) + + ui_websocket.actionAesDecrypt(0, aes_iv.encode("base64"), aes_encrypted.encode("base64"), aes_key) + assert ui_websocket.ws.result == "hello" + + + def testAes(self, ui_websocket): + ui_websocket.actionAesEncrypt(0, "hello") + key, iv, encrypted = ui_websocket.ws.result + + assert len(key) == 44 + assert len(iv) == 24 + assert len(encrypted) == 24 + + # Single decrypt + ui_websocket.actionAesDecrypt(0, iv, encrypted, key) + assert ui_websocket.ws.result == "hello" + + # Batch decrypt + ui_websocket.actionAesEncrypt(0, "hello") + key2, iv2, encrypted2 = ui_websocket.ws.result + + assert [key, iv, encrypted] != [key2, iv2, encrypted2] + + # 2 correct key + ui_websocket.actionAesDecrypt(0, [[iv, encrypted], [iv, encrypted], [iv, "baad"], [iv2, encrypted2]], [key]) + assert ui_websocket.ws.result == ["hello", "hello", None, None] + + # 3 key + ui_websocket.actionAesDecrypt(0, [[iv, encrypted], [iv, encrypted], [iv, "baad"], [iv2, encrypted2]], [key, key2]) + assert ui_websocket.ws.result == ["hello", "hello", None, "hello"] + + def testAesUtf8(self, ui_websocket): + utf8_text = u'\xc1rv\xedzt\xfbr\xf5t\xfck\xf6rf\xfar\xf3g\xe9' + ui_websocket.actionAesEncrypt(0, 
utf8_text) + key, iv, encrypted = ui_websocket.ws.result + + ui_websocket.actionAesDecrypt(0, iv, encrypted, key) + assert ui_websocket.ws.result == utf8_text diff --git a/plugins/CryptMessage/Test/conftest.py b/plugins/CryptMessage/Test/conftest.py new file mode 100644 index 00000000..8c1df5b2 --- /dev/null +++ b/plugins/CryptMessage/Test/conftest.py @@ -0,0 +1 @@ +from src.Test.conftest import * \ No newline at end of file diff --git a/plugins/CryptMessage/Test/pytest.ini b/plugins/CryptMessage/Test/pytest.ini new file mode 100644 index 00000000..d09210d1 --- /dev/null +++ b/plugins/CryptMessage/Test/pytest.ini @@ -0,0 +1,5 @@ +[pytest] +python_files = Test*.py +addopts = -rsxX -v --durations=6 +markers = + webtest: mark a test as a webtest. \ No newline at end of file diff --git a/plugins/CryptMessage/__init__.py b/plugins/CryptMessage/__init__.py new file mode 100644 index 00000000..3eb41820 --- /dev/null +++ b/plugins/CryptMessage/__init__.py @@ -0,0 +1 @@ +import CryptMessagePlugin \ No newline at end of file diff --git a/plugins/Sidebar/SidebarPlugin.py b/plugins/Sidebar/SidebarPlugin.py new file mode 100644 index 00000000..ab258822 --- /dev/null +++ b/plugins/Sidebar/SidebarPlugin.py @@ -0,0 +1,468 @@ +import re +import os +import cgi +import sys +import math +import time +try: + import cStringIO as StringIO +except: + import StringIO + + +from Config import config +from Plugin import PluginManager +from Debug import Debug + +plugin_dir = "plugins/Sidebar" +media_dir = plugin_dir + "/media" +sys.path.append(plugin_dir) # To able to load geoip lib + +loc_cache = {} + + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + # Inject our resources to end of original file streams + def actionUiMedia(self, path): + if path == "/uimedia/all.js" or path == "/uimedia/all.css": + # First yield the original file and header + body_generator = super(UiRequestPlugin, self).actionUiMedia(path) + for part in body_generator: + yield part + + # Append our 
media file to the end + ext = re.match(".*(js|css)$", path).group(1) + plugin_media_file = "%s/all.%s" % (media_dir, ext) + if config.debug: + # If debugging merge *.css to all.css and *.js to all.js + from Debug import DebugMedia + DebugMedia.merge(plugin_media_file) + for part in self.actionFile(plugin_media_file, send_header=False): + yield part + elif path.startswith("/uimedia/globe/"): # Serve WebGL globe files + file_name = re.match(".*/(.*)", path).group(1) + plugin_media_file = "%s-globe/%s" % (media_dir, file_name) + if config.debug and path.endswith("all.js"): + # If debugging merge *.css to all.css and *.js to all.js + from Debug import DebugMedia + DebugMedia.merge(plugin_media_file) + for part in self.actionFile(plugin_media_file): + yield part + else: + for part in super(UiRequestPlugin, self).actionUiMedia(path): + yield part + + +@PluginManager.registerTo("UiWebsocket") +class UiWebsocketPlugin(object): + + def sidebarRenderPeerStats(self, body, site): + connected = len([peer for peer in site.peers.values() if peer.connection and peer.connection.connected]) + connectable = len([peer_id for peer_id in site.peers.keys() if not peer_id.endswith(":0")]) + peers_total = len(site.peers) + if peers_total: + percent_connected = float(connected) / peers_total + percent_connectable = float(connectable) / peers_total + else: + percent_connectable = percent_connected = 0 + body.append(""" +
  • + +
      +
    • +
    • +
    • +
    +
      +
    • connected:{connected}
    • +
    • Connectable:{connectable}
    • +
    • Total:{peers_total}
    • +
    +
  • + """.format(**locals())) + + def sidebarRenderTransferStats(self, body, site): + recv = float(site.settings.get("bytes_recv", 0)) / 1024 / 1024 + sent = float(site.settings.get("bytes_sent", 0)) / 1024 / 1024 + transfer_total = recv + sent + if transfer_total: + percent_recv = recv / transfer_total + percent_sent = sent / transfer_total + else: + percent_recv = 0.5 + percent_sent = 0.5 + body.append(""" +
  • + +
      +
    • +
    • +
    +
      +
    • Received:{recv:.2f}MB
    • +
    • Sent:{sent:.2f}MB
    • +
    +
  • + """.format(**locals())) + + def sidebarRenderFileStats(self, body, site): + body.append("
    • ") + + extensions = ( + ("html", "yellow"), + ("css", "orange"), + ("js", "purple"), + ("image", "green"), + ("json", "blue"), + ("other", "white"), + ("total", "black") + ) + # Collect stats + size_filetypes = {} + size_total = 0 + for content in site.content_manager.contents.values(): + if "files" not in content: + continue + for file_name, file_details in content["files"].items(): + size_total += file_details["size"] + ext = file_name.split(".")[-1] + size_filetypes[ext] = size_filetypes.get(ext, 0) + file_details["size"] + size_other = size_total + + # Bar + for extension, color in extensions: + if extension == "total": + continue + if extension == "other": + size = size_other + elif extension == "image": + size = size_filetypes.get("jpg", 0) + size_filetypes.get("png", 0) + size_filetypes.get("gif", 0) + size_other -= size + else: + size = size_filetypes.get(extension, 0) + size_other -= size + percent = 100 * (float(size) / size_total) + body.append(u"
    • " % (percent, extension, color, extension)) + + # Legend + body.append("
      ") + for extension, color in extensions: + if extension == "other": + size = size_other + elif extension == "image": + size = size_filetypes.get("jpg", 0) + size_filetypes.get("png", 0) + size_filetypes.get("gif", 0) + elif extension == "total": + size = size_total + else: + size = size_filetypes.get(extension, 0) + + if extension == "js": + title = "javascript" + else: + title = extension + + if size > 1024 * 1024 * 10: # Format as mB is more than 10mB + size_formatted = "%.0fMB" % (size / 1024 / 1024) + else: + size_formatted = "%.0fkB" % (size / 1024) + + body.append(u"
    • %s:%s
    • " % (color, title, size_formatted)) + + body.append("
  • ") + + def getFreeSpace(self): + free_space = 0 + if "statvfs" in dir(os): # Unix + statvfs = os.statvfs(config.data_dir) + free_space = statvfs.f_frsize * statvfs.f_bavail + else: # Windows + try: + import ctypes + free_space_pointer = ctypes.c_ulonglong(0) + ctypes.windll.kernel32.GetDiskFreeSpaceExW( + ctypes.c_wchar_p(config.data_dir), None, None, ctypes.pointer(free_space_pointer) + ) + free_space = free_space_pointer.value + except Exception, err: + self.log.debug("GetFreeSpace error: %s" % err) + return free_space + + def sidebarRenderSizeLimit(self, body, site): + free_space = self.getFreeSpace() / 1024 / 1024 + size = float(site.settings["size"]) / 1024 / 1024 + size_limit = site.getSizeLimit() + percent_used = size / size_limit + body.append(""" +
  • + + MB + Set +
  • + """.format(**locals())) + + + def sidebarRenderOptionalFileStats(self, body, site): + size_total = 0.0 + size_downloaded = 0.0 + for content in site.content_manager.contents.values(): + if "files_optional" not in content: + continue + for file_name, file_details in content["files_optional"].items(): + size_total += file_details["size"] + if site.content_manager.hashfield.hasHash(file_details["sha512"]): + size_downloaded += file_details["size"] + + + if not size_total: + return False + + percent_downloaded = size_downloaded / size_total + + size_formatted_total = size_total / 1024 / 1024 + size_formatted_downloaded = size_downloaded / 1024 / 1024 + + body.append(""" +
  • + +
      +
    • +
    • +
    +
      +
    • Downloaded:{size_formatted_downloaded:.2f}MB
    • +
    • Total:{size_formatted_total:.2f}MB
    • +
    +
  • + """.format(**locals())) + + return True + + def sidebarRenderOptionalFileSettings(self, body, site): + if self.site.settings.get("autodownloadoptional"): + checked = "checked='checked'" + else: + checked = "" + body.append(""" +
  • + +
    +
  • + """.format(**locals())) + + def sidebarRenderDbOptions(self, body, site): + if not site.storage.db: + return False + + inner_path = site.storage.getInnerPath(site.storage.db.db_path) + size = float(site.storage.getSize(inner_path)) / 1024 + body.append(u""" +
  • + + + +
  • + """.format(**locals())) + + def sidebarRenderIdentity(self, body, site): + auth_address = self.user.getAuthAddress(self.site.address) + body.append(""" +
  • + + {auth_address} + Change +
  • + """.format(**locals())) + + def sidebarRenderOwnedCheckbox(self, body, site): + if self.site.settings["own"]: + checked = "checked='checked'" + else: + checked = "" + + body.append(""" +

    This is my site

    +
    + """.format(**locals())) + + def sidebarRenderOwnSettings(self, body, site): + title = cgi.escape(site.content_manager.contents["content.json"]["title"], True) + description = cgi.escape(site.content_manager.contents["content.json"]["description"], True) + privatekey = cgi.escape(self.user.getSiteData(site.address, create=False).get("privatekey", "")) + + body.append(u""" +
  • + + +
  • + +
  • + + +
  • + +
  • + + +
  • + +
  • + Save site settings +
  • + """.format(**locals())) + + def sidebarRenderContents(self, body, site): + body.append(""" +
  • + + + + Sign + Publish +
  • + """) + + def actionSidebarGetHtmlTag(self, to): + site = self.site + + body = [] + + body.append("
    ") + body.append("

    %s

    " % site.content_manager.contents["content.json"]["title"]) + + body.append("
    ") + + body.append("
      ") + + self.sidebarRenderPeerStats(body, site) + self.sidebarRenderTransferStats(body, site) + self.sidebarRenderFileStats(body, site) + self.sidebarRenderSizeLimit(body, site) + has_optional = self.sidebarRenderOptionalFileStats(body, site) + if has_optional: + self.sidebarRenderOptionalFileSettings(body, site) + self.sidebarRenderDbOptions(body, site) + self.sidebarRenderIdentity(body, site) + + self.sidebarRenderOwnedCheckbox(body, site) + body.append("
      ") + self.sidebarRenderOwnSettings(body, site) + self.sidebarRenderContents(body, site) + body.append("
      ") + body.append("
    ") + body.append("
    ") + + self.response(to, "".join(body)) + + def downloadGeoLiteDb(self, db_path): + import urllib + import gzip + import shutil + + self.log.info("Downloading GeoLite2 City database...") + self.cmd("notification", ["geolite-info", "Downloading GeoLite2 City database (one time only, ~15MB)...", 0]) + try: + # Download + file = urllib.urlopen("http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.mmdb.gz") + data = StringIO.StringIO() + while True: + buff = file.read(1024 * 16) + if not buff: + break + data.write(buff) + self.log.info("GeoLite2 City database downloaded (%s bytes), unpacking..." % data.tell()) + data.seek(0) + + # Unpack + with gzip.GzipFile(fileobj=data) as gzip_file: + shutil.copyfileobj(gzip_file, open(db_path, "wb")) + + self.cmd("notification", ["geolite-done", "GeoLite2 City database downloaded!", 5000]) + time.sleep(2) # Wait for notify animation + except Exception, err: + self.cmd("notification", ["geolite-error", "GeoLite2 City database download error: %s!" 
% err, 0]) + raise err + + def actionSidebarGetPeers(self, to): + permissions = self.getPermissions(to) + if "ADMIN" not in permissions: + return self.response(to, "You don't have permission to run this command") + try: + import maxminddb + db_path = config.data_dir + '/GeoLite2-City.mmdb' + if not os.path.isfile(db_path): + self.downloadGeoLiteDb(db_path) + geodb = maxminddb.open_database(db_path) + + peers = self.site.peers.values() + # Find avg ping + ping_times = [ + peer.connection.last_ping_delay + for peer in peers + if peer.connection and peer.connection.last_ping_delay and peer.connection.last_ping_delay + ] + if ping_times: + ping_avg = sum(ping_times) / float(len(ping_times)) + else: + ping_avg = 0 + # Place bars + globe_data = [] + placed = {} # Already placed bars here + for peer in peers: + # Height of bar + if peer.connection and peer.connection.last_ping_delay: + ping = min(0.20, math.log(1 + peer.connection.last_ping_delay / ping_avg, 300)) + else: + ping = -0.03 + + # Query and cache location + if peer.ip in loc_cache: + loc = loc_cache[peer.ip] + else: + loc = geodb.get(peer.ip) + loc_cache[peer.ip] = loc + if not loc or "location" not in loc: + continue + + # Create position array + lat, lon = (loc["location"]["latitude"], loc["location"]["longitude"]) + latlon = "%s,%s" % (lat, lon) + if latlon in placed: # Dont place more than 1 bar to same place, fake repos using ip address last two part + lat += float(128 - int(peer.ip.split(".")[-2])) / 50 + lon += float(128 - int(peer.ip.split(".")[-1])) / 50 + latlon = "%s,%s" % (lat, lon) + placed[latlon] = True + + globe_data += (lat, lon, ping) + # Append myself + loc = geodb.get(config.ip_external) + if loc: + lat, lon = (loc["location"]["latitude"], loc["location"]["longitude"]) + globe_data += (lat, lon, -0.135) + + self.response(to, globe_data) + except Exception, err: + self.log.debug("sidebarGetPeers error: %s" % Debug.formatException(err)) + self.response(to, {"error": err}) + + def 
actionSiteSetOwned(self, to, owned): + permissions = self.getPermissions(to) + if "ADMIN" not in permissions: + return self.response(to, "You don't have permission to run this command") + self.site.settings["own"] = bool(owned) + + + def actionSiteSetAutodownloadoptional(self, to, owned): + permissions = self.getPermissions(to) + if "ADMIN" not in permissions: + return self.response(to, "You don't have permission to run this command") + self.site.settings["autodownloadoptional"] = bool(owned) + self.site.update() + self.site.worker_manager.removeGoodFileTasks() diff --git a/plugins/Sidebar/__init__.py b/plugins/Sidebar/__init__.py new file mode 100644 index 00000000..8b61cb4a --- /dev/null +++ b/plugins/Sidebar/__init__.py @@ -0,0 +1 @@ +import SidebarPlugin \ No newline at end of file diff --git a/plugins/Sidebar/maxminddb/__init__.py b/plugins/Sidebar/maxminddb/__init__.py new file mode 100644 index 00000000..fc28186b --- /dev/null +++ b/plugins/Sidebar/maxminddb/__init__.py @@ -0,0 +1,46 @@ +# pylint:disable=C0111 +import os + +import maxminddb.reader + +try: + import maxminddb.extension +except ImportError: + maxminddb.extension = None + +from maxminddb.const import (MODE_AUTO, MODE_MMAP, MODE_MMAP_EXT, MODE_FILE, + MODE_MEMORY) +from maxminddb.decoder import InvalidDatabaseError + + +def open_database(database, mode=MODE_AUTO): + """Open a Maxmind DB database + + Arguments: + database -- A path to a valid MaxMind DB file such as a GeoIP2 + database file. + mode -- mode to open the database with. Valid mode are: + * MODE_MMAP_EXT - use the C extension with memory map. + * MODE_MMAP - read from memory map. Pure Python. + * MODE_FILE - read database as standard file. Pure Python. + * MODE_MEMORY - load database into memory. Pure Python. + * MODE_AUTO - tries MODE_MMAP_EXT, MODE_MMAP, MODE_FILE in that + order. Default mode. 
+ """ + if (mode == MODE_AUTO and maxminddb.extension and + hasattr(maxminddb.extension, 'Reader')) or mode == MODE_MMAP_EXT: + return maxminddb.extension.Reader(database) + elif mode in (MODE_AUTO, MODE_MMAP, MODE_FILE, MODE_MEMORY): + return maxminddb.reader.Reader(database, mode) + raise ValueError('Unsupported open mode: {0}'.format(mode)) + + +def Reader(database): # pylint: disable=invalid-name + """This exists for backwards compatibility. Use open_database instead""" + return open_database(database) + +__title__ = 'maxminddb' +__version__ = '1.2.0' +__author__ = 'Gregory Oschwald' +__license__ = 'Apache License, Version 2.0' +__copyright__ = 'Copyright 2014 Maxmind, Inc.' diff --git a/plugins/Sidebar/maxminddb/compat.py b/plugins/Sidebar/maxminddb/compat.py new file mode 100644 index 00000000..14c98832 --- /dev/null +++ b/plugins/Sidebar/maxminddb/compat.py @@ -0,0 +1,28 @@ +import sys + +# pylint: skip-file + +if sys.version_info[0] == 2: + import ipaddr as ipaddress # pylint:disable=F0401 + ipaddress.ip_address = ipaddress.IPAddress + + int_from_byte = ord + + FileNotFoundError = IOError + + def int_from_bytes(b): + if b: + return int(b.encode("hex"), 16) + return 0 + + byte_from_int = chr +else: + import ipaddress # pylint:disable=F0401 + + int_from_byte = lambda x: x + + FileNotFoundError = FileNotFoundError + + int_from_bytes = lambda x: int.from_bytes(x, 'big') + + byte_from_int = lambda x: bytes([x]) diff --git a/plugins/Sidebar/maxminddb/const.py b/plugins/Sidebar/maxminddb/const.py new file mode 100644 index 00000000..59ea84b6 --- /dev/null +++ b/plugins/Sidebar/maxminddb/const.py @@ -0,0 +1,7 @@ +"""Constants used in the API""" + +MODE_AUTO = 0 +MODE_MMAP_EXT = 1 +MODE_MMAP = 2 +MODE_FILE = 4 +MODE_MEMORY = 8 diff --git a/plugins/Sidebar/maxminddb/decoder.py b/plugins/Sidebar/maxminddb/decoder.py new file mode 100644 index 00000000..1b8f0711 --- /dev/null +++ b/plugins/Sidebar/maxminddb/decoder.py @@ -0,0 +1,173 @@ +""" +maxminddb.decoder 
+~~~~~~~~~~~~~~~~~ + +This package contains code for decoding the MaxMind DB data section. + +""" +from __future__ import unicode_literals + +import struct + +from maxminddb.compat import byte_from_int, int_from_bytes +from maxminddb.errors import InvalidDatabaseError + + +class Decoder(object): # pylint: disable=too-few-public-methods + + """Decoder for the data section of the MaxMind DB""" + + def __init__(self, database_buffer, pointer_base=0, pointer_test=False): + """Created a Decoder for a MaxMind DB + + Arguments: + database_buffer -- an mmap'd MaxMind DB file. + pointer_base -- the base number to use when decoding a pointer + pointer_test -- used for internal unit testing of pointer code + """ + self._pointer_test = pointer_test + self._buffer = database_buffer + self._pointer_base = pointer_base + + def _decode_array(self, size, offset): + array = [] + for _ in range(size): + (value, offset) = self.decode(offset) + array.append(value) + return array, offset + + def _decode_boolean(self, size, offset): + return size != 0, offset + + def _decode_bytes(self, size, offset): + new_offset = offset + size + return self._buffer[offset:new_offset], new_offset + + # pylint: disable=no-self-argument + # |-> I am open to better ways of doing this as long as it doesn't involve + # lots of code duplication. 
+ def _decode_packed_type(type_code, type_size, pad=False): + # pylint: disable=protected-access, missing-docstring + def unpack_type(self, size, offset): + if not pad: + self._verify_size(size, type_size) + new_offset = offset + type_size + packed_bytes = self._buffer[offset:new_offset] + if pad: + packed_bytes = packed_bytes.rjust(type_size, b'\x00') + (value,) = struct.unpack(type_code, packed_bytes) + return value, new_offset + return unpack_type + + def _decode_map(self, size, offset): + container = {} + for _ in range(size): + (key, offset) = self.decode(offset) + (value, offset) = self.decode(offset) + container[key] = value + return container, offset + + _pointer_value_offset = { + 1: 0, + 2: 2048, + 3: 526336, + 4: 0, + } + + def _decode_pointer(self, size, offset): + pointer_size = ((size >> 3) & 0x3) + 1 + new_offset = offset + pointer_size + pointer_bytes = self._buffer[offset:new_offset] + packed = pointer_bytes if pointer_size == 4 else struct.pack( + b'!c', byte_from_int(size & 0x7)) + pointer_bytes + unpacked = int_from_bytes(packed) + pointer = unpacked + self._pointer_base + \ + self._pointer_value_offset[pointer_size] + if self._pointer_test: + return pointer, new_offset + (value, _) = self.decode(pointer) + return value, new_offset + + def _decode_uint(self, size, offset): + new_offset = offset + size + uint_bytes = self._buffer[offset:new_offset] + return int_from_bytes(uint_bytes), new_offset + + def _decode_utf8_string(self, size, offset): + new_offset = offset + size + return self._buffer[offset:new_offset].decode('utf-8'), new_offset + + _type_decoder = { + 1: _decode_pointer, + 2: _decode_utf8_string, + 3: _decode_packed_type(b'!d', 8), # double, + 4: _decode_bytes, + 5: _decode_uint, # uint16 + 6: _decode_uint, # uint32 + 7: _decode_map, + 8: _decode_packed_type(b'!i', 4, pad=True), # int32 + 9: _decode_uint, # uint64 + 10: _decode_uint, # uint128 + 11: _decode_array, + 14: _decode_boolean, + 15: _decode_packed_type(b'!f', 4), # float, + 
} + + def decode(self, offset): + """Decode a section of the data section starting at offset + + Arguments: + offset -- the location of the data structure to decode + """ + new_offset = offset + 1 + (ctrl_byte,) = struct.unpack(b'!B', self._buffer[offset:new_offset]) + type_num = ctrl_byte >> 5 + # Extended type + if not type_num: + (type_num, new_offset) = self._read_extended(new_offset) + + if not type_num in self._type_decoder: + raise InvalidDatabaseError('Unexpected type number ({type}) ' + 'encountered'.format(type=type_num)) + + (size, new_offset) = self._size_from_ctrl_byte( + ctrl_byte, new_offset, type_num) + return self._type_decoder[type_num](self, size, new_offset) + + def _read_extended(self, offset): + (next_byte,) = struct.unpack(b'!B', self._buffer[offset:offset + 1]) + type_num = next_byte + 7 + if type_num < 7: + raise InvalidDatabaseError( + 'Something went horribly wrong in the decoder. An ' + 'extended type resolved to a type number < 8 ' + '({type})'.format(type=type_num)) + return type_num, offset + 1 + + def _verify_size(self, expected, actual): + if expected != actual: + raise InvalidDatabaseError( + 'The MaxMind DB file\'s data section contains bad data ' + '(unknown data type or corrupt data)' + ) + + def _size_from_ctrl_byte(self, ctrl_byte, offset, type_num): + size = ctrl_byte & 0x1f + if type_num == 1: + return size, offset + bytes_to_read = 0 if size < 29 else size - 28 + + new_offset = offset + bytes_to_read + size_bytes = self._buffer[offset:new_offset] + + # Using unpack rather than int_from_bytes as it is about 200 lookups + # per second faster here. 
+ if size == 29: + size = 29 + struct.unpack(b'!B', size_bytes)[0] + elif size == 30: + size = 285 + struct.unpack(b'!H', size_bytes)[0] + elif size > 30: + size = struct.unpack( + b'!I', size_bytes.rjust(4, b'\x00'))[0] + 65821 + + return size, new_offset diff --git a/plugins/Sidebar/maxminddb/errors.py b/plugins/Sidebar/maxminddb/errors.py new file mode 100644 index 00000000..f04ff028 --- /dev/null +++ b/plugins/Sidebar/maxminddb/errors.py @@ -0,0 +1,11 @@ +""" +maxminddb.errors +~~~~~~~~~~~~~~~~ + +This module contains custom errors for the MaxMind DB reader +""" + + +class InvalidDatabaseError(RuntimeError): + + """This error is thrown when unexpected data is found in the database.""" diff --git a/plugins/Sidebar/maxminddb/extension/maxminddb.c b/plugins/Sidebar/maxminddb/extension/maxminddb.c new file mode 100644 index 00000000..9e4d45e2 --- /dev/null +++ b/plugins/Sidebar/maxminddb/extension/maxminddb.c @@ -0,0 +1,570 @@ +#include +#include +#include "structmember.h" + +#define __STDC_FORMAT_MACROS +#include + +static PyTypeObject Reader_Type; +static PyTypeObject Metadata_Type; +static PyObject *MaxMindDB_error; + +typedef struct { + PyObject_HEAD /* no semicolon */ + MMDB_s *mmdb; +} Reader_obj; + +typedef struct { + PyObject_HEAD /* no semicolon */ + PyObject *binary_format_major_version; + PyObject *binary_format_minor_version; + PyObject *build_epoch; + PyObject *database_type; + PyObject *description; + PyObject *ip_version; + PyObject *languages; + PyObject *node_count; + PyObject *record_size; +} Metadata_obj; + +static PyObject *from_entry_data_list(MMDB_entry_data_list_s **entry_data_list); +static PyObject *from_map(MMDB_entry_data_list_s **entry_data_list); +static PyObject *from_array(MMDB_entry_data_list_s **entry_data_list); +static PyObject *from_uint128(const MMDB_entry_data_list_s *entry_data_list); + +#if PY_MAJOR_VERSION >= 3 + #define MOD_INIT(name) PyMODINIT_FUNC PyInit_ ## name(void) + #define RETURN_MOD_INIT(m) return (m) + #define 
FILE_NOT_FOUND_ERROR PyExc_FileNotFoundError +#else + #define MOD_INIT(name) PyMODINIT_FUNC init ## name(void) + #define RETURN_MOD_INIT(m) return + #define PyInt_FromLong PyLong_FromLong + #define FILE_NOT_FOUND_ERROR PyExc_IOError +#endif + +#ifdef __GNUC__ + # define UNUSED(x) UNUSED_ ## x __attribute__((__unused__)) +#else + # define UNUSED(x) UNUSED_ ## x +#endif + +static int Reader_init(PyObject *self, PyObject *args, PyObject *kwds) +{ + char *filename; + int mode = 0; + + static char *kwlist[] = {"database", "mode", NULL}; + if (!PyArg_ParseTupleAndKeywords(args, kwds, "s|i", kwlist, &filename, &mode)) { + return -1; + } + + if (mode != 0 && mode != 1) { + PyErr_Format(PyExc_ValueError, "Unsupported open mode (%i). Only " + "MODE_AUTO and MODE_MMAP_EXT are supported by this extension.", + mode); + return -1; + } + + if (0 != access(filename, R_OK)) { + PyErr_Format(FILE_NOT_FOUND_ERROR, + "No such file or directory: '%s'", + filename); + return -1; + } + + MMDB_s *mmdb = (MMDB_s *)malloc(sizeof(MMDB_s)); + if (NULL == mmdb) { + PyErr_NoMemory(); + return -1; + } + + Reader_obj *mmdb_obj = (Reader_obj *)self; + if (!mmdb_obj) { + free(mmdb); + PyErr_NoMemory(); + return -1; + } + + uint16_t status = MMDB_open(filename, MMDB_MODE_MMAP, mmdb); + + if (MMDB_SUCCESS != status) { + free(mmdb); + PyErr_Format( + MaxMindDB_error, + "Error opening database file (%s). 
Is this a valid MaxMind DB file?", + filename + ); + return -1; + } + + mmdb_obj->mmdb = mmdb; + return 0; +} + +static PyObject *Reader_get(PyObject *self, PyObject *args) +{ + char *ip_address = NULL; + + Reader_obj *mmdb_obj = (Reader_obj *)self; + if (!PyArg_ParseTuple(args, "s", &ip_address)) { + return NULL; + } + + MMDB_s *mmdb = mmdb_obj->mmdb; + + if (NULL == mmdb) { + PyErr_SetString(PyExc_ValueError, + "Attempt to read from a closed MaxMind DB."); + return NULL; + } + + int gai_error = 0; + int mmdb_error = MMDB_SUCCESS; + MMDB_lookup_result_s result = + MMDB_lookup_string(mmdb, ip_address, &gai_error, + &mmdb_error); + + if (0 != gai_error) { + PyErr_Format(PyExc_ValueError, + "'%s' does not appear to be an IPv4 or IPv6 address.", + ip_address); + return NULL; + } + + if (MMDB_SUCCESS != mmdb_error) { + PyObject *exception; + if (MMDB_IPV6_LOOKUP_IN_IPV4_DATABASE_ERROR == mmdb_error) { + exception = PyExc_ValueError; + } else { + exception = MaxMindDB_error; + } + PyErr_Format(exception, "Error looking up %s. %s", + ip_address, MMDB_strerror(mmdb_error)); + return NULL; + } + + if (!result.found_entry) { + Py_RETURN_NONE; + } + + MMDB_entry_data_list_s *entry_data_list = NULL; + int status = MMDB_get_entry_data_list(&result.entry, &entry_data_list); + if (MMDB_SUCCESS != status) { + PyErr_Format(MaxMindDB_error, + "Error while looking up data for %s. 
%s", + ip_address, MMDB_strerror(status)); + MMDB_free_entry_data_list(entry_data_list); + return NULL; + } + + MMDB_entry_data_list_s *original_entry_data_list = entry_data_list; + PyObject *py_obj = from_entry_data_list(&entry_data_list); + MMDB_free_entry_data_list(original_entry_data_list); + return py_obj; +} + +static PyObject *Reader_metadata(PyObject *self, PyObject *UNUSED(args)) +{ + Reader_obj *mmdb_obj = (Reader_obj *)self; + + if (NULL == mmdb_obj->mmdb) { + PyErr_SetString(PyExc_IOError, + "Attempt to read from a closed MaxMind DB."); + return NULL; + } + + MMDB_entry_data_list_s *entry_data_list; + MMDB_get_metadata_as_entry_data_list(mmdb_obj->mmdb, &entry_data_list); + MMDB_entry_data_list_s *original_entry_data_list = entry_data_list; + + PyObject *metadata_dict = from_entry_data_list(&entry_data_list); + MMDB_free_entry_data_list(original_entry_data_list); + if (NULL == metadata_dict || !PyDict_Check(metadata_dict)) { + PyErr_SetString(MaxMindDB_error, + "Error decoding metadata."); + return NULL; + } + + PyObject *args = PyTuple_New(0); + if (NULL == args) { + Py_DECREF(metadata_dict); + return NULL; + } + + PyObject *metadata = PyObject_Call((PyObject *)&Metadata_Type, args, + metadata_dict); + + Py_DECREF(metadata_dict); + return metadata; +} + +static PyObject *Reader_close(PyObject *self, PyObject *UNUSED(args)) +{ + Reader_obj *mmdb_obj = (Reader_obj *)self; + + if (NULL != mmdb_obj->mmdb) { + MMDB_close(mmdb_obj->mmdb); + free(mmdb_obj->mmdb); + mmdb_obj->mmdb = NULL; + } + + Py_RETURN_NONE; +} + +static void Reader_dealloc(PyObject *self) +{ + Reader_obj *obj = (Reader_obj *)self; + if (NULL != obj->mmdb) { + Reader_close(self, NULL); + } + + PyObject_Del(self); +} + +static int Metadata_init(PyObject *self, PyObject *args, PyObject *kwds) +{ + + PyObject + *binary_format_major_version, + *binary_format_minor_version, + *build_epoch, + *database_type, + *description, + *ip_version, + *languages, + *node_count, + *record_size; + + static 
char *kwlist[] = { + "binary_format_major_version", + "binary_format_minor_version", + "build_epoch", + "database_type", + "description", + "ip_version", + "languages", + "node_count", + "record_size", + NULL + }; + + if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OOOOOOOOO", kwlist, + &binary_format_major_version, + &binary_format_minor_version, + &build_epoch, + &database_type, + &description, + &ip_version, + &languages, + &node_count, + &record_size)) { + return -1; + } + + Metadata_obj *obj = (Metadata_obj *)self; + + obj->binary_format_major_version = binary_format_major_version; + obj->binary_format_minor_version = binary_format_minor_version; + obj->build_epoch = build_epoch; + obj->database_type = database_type; + obj->description = description; + obj->ip_version = ip_version; + obj->languages = languages; + obj->node_count = node_count; + obj->record_size = record_size; + + Py_INCREF(obj->binary_format_major_version); + Py_INCREF(obj->binary_format_minor_version); + Py_INCREF(obj->build_epoch); + Py_INCREF(obj->database_type); + Py_INCREF(obj->description); + Py_INCREF(obj->ip_version); + Py_INCREF(obj->languages); + Py_INCREF(obj->node_count); + Py_INCREF(obj->record_size); + + return 0; +} + +static void Metadata_dealloc(PyObject *self) +{ + Metadata_obj *obj = (Metadata_obj *)self; + Py_DECREF(obj->binary_format_major_version); + Py_DECREF(obj->binary_format_minor_version); + Py_DECREF(obj->build_epoch); + Py_DECREF(obj->database_type); + Py_DECREF(obj->description); + Py_DECREF(obj->ip_version); + Py_DECREF(obj->languages); + Py_DECREF(obj->node_count); + Py_DECREF(obj->record_size); + PyObject_Del(self); +} + +static PyObject *from_entry_data_list(MMDB_entry_data_list_s **entry_data_list) +{ + if (NULL == entry_data_list || NULL == *entry_data_list) { + PyErr_SetString( + MaxMindDB_error, + "Error while looking up data. Your database may be corrupt or you have found a bug in libmaxminddb." 
+ ); + return NULL; + } + + switch ((*entry_data_list)->entry_data.type) { + case MMDB_DATA_TYPE_MAP: + return from_map(entry_data_list); + case MMDB_DATA_TYPE_ARRAY: + return from_array(entry_data_list); + case MMDB_DATA_TYPE_UTF8_STRING: + return PyUnicode_FromStringAndSize( + (*entry_data_list)->entry_data.utf8_string, + (*entry_data_list)->entry_data.data_size + ); + case MMDB_DATA_TYPE_BYTES: + return PyByteArray_FromStringAndSize( + (const char *)(*entry_data_list)->entry_data.bytes, + (Py_ssize_t)(*entry_data_list)->entry_data.data_size); + case MMDB_DATA_TYPE_DOUBLE: + return PyFloat_FromDouble((*entry_data_list)->entry_data.double_value); + case MMDB_DATA_TYPE_FLOAT: + return PyFloat_FromDouble((*entry_data_list)->entry_data.float_value); + case MMDB_DATA_TYPE_UINT16: + return PyLong_FromLong( (*entry_data_list)->entry_data.uint16); + case MMDB_DATA_TYPE_UINT32: + return PyLong_FromLong((*entry_data_list)->entry_data.uint32); + case MMDB_DATA_TYPE_BOOLEAN: + return PyBool_FromLong((*entry_data_list)->entry_data.boolean); + case MMDB_DATA_TYPE_UINT64: + return PyLong_FromUnsignedLongLong( + (*entry_data_list)->entry_data.uint64); + case MMDB_DATA_TYPE_UINT128: + return from_uint128(*entry_data_list); + case MMDB_DATA_TYPE_INT32: + return PyLong_FromLong((*entry_data_list)->entry_data.int32); + default: + PyErr_Format(MaxMindDB_error, + "Invalid data type arguments: %d", + (*entry_data_list)->entry_data.type); + return NULL; + } + return NULL; +} + +static PyObject *from_map(MMDB_entry_data_list_s **entry_data_list) +{ + PyObject *py_obj = PyDict_New(); + if (NULL == py_obj) { + PyErr_NoMemory(); + return NULL; + } + + const uint32_t map_size = (*entry_data_list)->entry_data.data_size; + + uint i; + // entry_data_list cannot start out NULL (see from_entry_data_list). We + // check it in the loop because it may become NULL. 
+ // coverity[check_after_deref] + for (i = 0; i < map_size && entry_data_list; i++) { + *entry_data_list = (*entry_data_list)->next; + + PyObject *key = PyUnicode_FromStringAndSize( + (char *)(*entry_data_list)->entry_data.utf8_string, + (*entry_data_list)->entry_data.data_size + ); + + *entry_data_list = (*entry_data_list)->next; + + PyObject *value = from_entry_data_list(entry_data_list); + if (NULL == value) { + Py_DECREF(key); + Py_DECREF(py_obj); + return NULL; + } + PyDict_SetItem(py_obj, key, value); + Py_DECREF(value); + Py_DECREF(key); + } + + return py_obj; +} + +static PyObject *from_array(MMDB_entry_data_list_s **entry_data_list) +{ + const uint32_t size = (*entry_data_list)->entry_data.data_size; + + PyObject *py_obj = PyList_New(size); + if (NULL == py_obj) { + PyErr_NoMemory(); + return NULL; + } + + uint i; + // entry_data_list cannot start out NULL (see from_entry_data_list). We + // check it in the loop because it may become NULL. + // coverity[check_after_deref] + for (i = 0; i < size && entry_data_list; i++) { + *entry_data_list = (*entry_data_list)->next; + PyObject *value = from_entry_data_list(entry_data_list); + if (NULL == value) { + Py_DECREF(py_obj); + return NULL; + } + // PyList_SetItem 'steals' the reference + PyList_SetItem(py_obj, i, value); + } + return py_obj; +} + +static PyObject *from_uint128(const MMDB_entry_data_list_s *entry_data_list) +{ + uint64_t high = 0; + uint64_t low = 0; +#if MMDB_UINT128_IS_BYTE_ARRAY + int i; + for (i = 0; i < 8; i++) { + high = (high << 8) | entry_data_list->entry_data.uint128[i]; + } + + for (i = 8; i < 16; i++) { + low = (low << 8) | entry_data_list->entry_data.uint128[i]; + } +#else + high = entry_data_list->entry_data.uint128 >> 64; + low = (uint64_t)entry_data_list->entry_data.uint128; +#endif + + char *num_str = malloc(33); + if (NULL == num_str) { + PyErr_NoMemory(); + return NULL; + } + + snprintf(num_str, 33, "%016" PRIX64 "%016" PRIX64, high, low); + + PyObject *py_obj = 
PyLong_FromString(num_str, NULL, 16); + + free(num_str); + return py_obj; +} + +static PyMethodDef Reader_methods[] = { + { "get", Reader_get, METH_VARARGS, + "Get record for IP address" }, + { "metadata", Reader_metadata, METH_NOARGS, + "Returns metadata object for database" }, + { "close", Reader_close, METH_NOARGS, "Closes database"}, + { NULL, NULL, 0, NULL } +}; + +static PyTypeObject Reader_Type = { + PyVarObject_HEAD_INIT(NULL, 0) + .tp_basicsize = sizeof(Reader_obj), + .tp_dealloc = Reader_dealloc, + .tp_doc = "Reader object", + .tp_flags = Py_TPFLAGS_DEFAULT, + .tp_methods = Reader_methods, + .tp_name = "Reader", + .tp_init = Reader_init, +}; + +static PyMethodDef Metadata_methods[] = { + { NULL, NULL, 0, NULL } +}; + +/* *INDENT-OFF* */ +static PyMemberDef Metadata_members[] = { + { "binary_format_major_version", T_OBJECT, offsetof( + Metadata_obj, binary_format_major_version), READONLY, NULL }, + { "binary_format_minor_version", T_OBJECT, offsetof( + Metadata_obj, binary_format_minor_version), READONLY, NULL }, + { "build_epoch", T_OBJECT, offsetof(Metadata_obj, build_epoch), + READONLY, NULL }, + { "database_type", T_OBJECT, offsetof(Metadata_obj, database_type), + READONLY, NULL }, + { "description", T_OBJECT, offsetof(Metadata_obj, description), + READONLY, NULL }, + { "ip_version", T_OBJECT, offsetof(Metadata_obj, ip_version), + READONLY, NULL }, + { "languages", T_OBJECT, offsetof(Metadata_obj, languages), READONLY, + NULL }, + { "node_count", T_OBJECT, offsetof(Metadata_obj, node_count), + READONLY, NULL }, + { "record_size", T_OBJECT, offsetof(Metadata_obj, record_size), + READONLY, NULL }, + { NULL, 0, 0, 0, NULL } +}; +/* *INDENT-ON* */ + +static PyTypeObject Metadata_Type = { + PyVarObject_HEAD_INIT(NULL, 0) + .tp_basicsize = sizeof(Metadata_obj), + .tp_dealloc = Metadata_dealloc, + .tp_doc = "Metadata object", + .tp_flags = Py_TPFLAGS_DEFAULT, + .tp_members = Metadata_members, + .tp_methods = Metadata_methods, + .tp_name = "Metadata", + 
.tp_init = Metadata_init +}; + +static PyMethodDef MaxMindDB_methods[] = { + { NULL, NULL, 0, NULL } +}; + + +#if PY_MAJOR_VERSION >= 3 +static struct PyModuleDef MaxMindDB_module = { + PyModuleDef_HEAD_INIT, + .m_name = "extension", + .m_doc = "This is a C extension to read MaxMind DB file format", + .m_methods = MaxMindDB_methods, +}; +#endif + +MOD_INIT(extension){ + PyObject *m; + +#if PY_MAJOR_VERSION >= 3 + m = PyModule_Create(&MaxMindDB_module); +#else + m = Py_InitModule("extension", MaxMindDB_methods); +#endif + + if (!m) { + RETURN_MOD_INIT(NULL); + } + + Reader_Type.tp_new = PyType_GenericNew; + if (PyType_Ready(&Reader_Type)) { + RETURN_MOD_INIT(NULL); + } + Py_INCREF(&Reader_Type); + PyModule_AddObject(m, "Reader", (PyObject *)&Reader_Type); + + Metadata_Type.tp_new = PyType_GenericNew; + if (PyType_Ready(&Metadata_Type)) { + RETURN_MOD_INIT(NULL); + } + PyModule_AddObject(m, "extension", (PyObject *)&Metadata_Type); + + PyObject* error_mod = PyImport_ImportModule("maxminddb.errors"); + if (error_mod == NULL) { + RETURN_MOD_INIT(NULL); + } + + MaxMindDB_error = PyObject_GetAttrString(error_mod, "InvalidDatabaseError"); + Py_DECREF(error_mod); + + if (MaxMindDB_error == NULL) { + RETURN_MOD_INIT(NULL); + } + + Py_INCREF(MaxMindDB_error); + + /* We primarily add it to the module for backwards compatibility */ + PyModule_AddObject(m, "InvalidDatabaseError", MaxMindDB_error); + + RETURN_MOD_INIT(m); +} diff --git a/plugins/Sidebar/maxminddb/file.py b/plugins/Sidebar/maxminddb/file.py new file mode 100644 index 00000000..3460893e --- /dev/null +++ b/plugins/Sidebar/maxminddb/file.py @@ -0,0 +1,65 @@ +"""For internal use only. 
It provides a slice-like file reader.""" + +import os + +try: + from multiprocessing import Lock +except ImportError: + from threading import Lock + + +class FileBuffer(object): + + """A slice-able file reader""" + + def __init__(self, database): + self._handle = open(database, 'rb') + self._size = os.fstat(self._handle.fileno()).st_size + if not hasattr(os, 'pread'): + self._lock = Lock() + + def __getitem__(self, key): + if isinstance(key, slice): + return self._read(key.stop - key.start, key.start) + elif isinstance(key, int): + return self._read(1, key) + else: + raise TypeError("Invalid argument type.") + + def rfind(self, needle, start): + """Reverse find needle from start""" + pos = self._read(self._size - start - 1, start).rfind(needle) + if pos == -1: + return pos + return start + pos + + def size(self): + """Size of file""" + return self._size + + def close(self): + """Close file""" + self._handle.close() + + if hasattr(os, 'pread'): + + def _read(self, buffersize, offset): + """read that uses pread""" + # pylint: disable=no-member + return os.pread(self._handle.fileno(), buffersize, offset) + + else: + + def _read(self, buffersize, offset): + """read with a lock + + This lock is necessary as after a fork, the different processes + will share the same file table entry, even if we dup the fd, and + as such the same offsets. There does not appear to be a way to + duplicate the file table entry and we cannot re-open based on the + original path as that file may have replaced with another or + unlinked. + """ + with self._lock: + self._handle.seek(offset) + return self._handle.read(buffersize) diff --git a/plugins/Sidebar/maxminddb/ipaddr.py b/plugins/Sidebar/maxminddb/ipaddr.py new file mode 100644 index 00000000..ad27ae9d --- /dev/null +++ b/plugins/Sidebar/maxminddb/ipaddr.py @@ -0,0 +1,1897 @@ +#!/usr/bin/python +# +# Copyright 2007 Google Inc. +# Licensed to PSF under a Contributor Agreement. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. See the License for the specific language governing +# permissions and limitations under the License. + +"""A fast, lightweight IPv4/IPv6 manipulation library in Python. + +This library is used to create/poke/manipulate IPv4 and IPv6 addresses +and networks. + +""" + +__version__ = '2.1.10' + +import struct + +IPV4LENGTH = 32 +IPV6LENGTH = 128 + + +class AddressValueError(ValueError): + """A Value Error related to the address.""" + + +class NetmaskValueError(ValueError): + """A Value Error related to the netmask.""" + + +def IPAddress(address, version=None): + """Take an IP string/int and return an object of the correct type. + + Args: + address: A string or integer, the IP address. Either IPv4 or + IPv6 addresses may be supplied; integers less than 2**32 will + be considered to be IPv4 by default. + version: An Integer, 4 or 6. If set, don't try to automatically + determine what the IP address type is. important for things + like IPAddress(1), which could be IPv4, '0.0.0.1', or IPv6, + '::1'. + + Returns: + An IPv4Address or IPv6Address object. + + Raises: + ValueError: if the string passed isn't either a v4 or a v6 + address. 
+ + """ + if version: + if version == 4: + return IPv4Address(address) + elif version == 6: + return IPv6Address(address) + + try: + return IPv4Address(address) + except (AddressValueError, NetmaskValueError): + pass + + try: + return IPv6Address(address) + except (AddressValueError, NetmaskValueError): + pass + + raise ValueError('%r does not appear to be an IPv4 or IPv6 address' % + address) + + +def IPNetwork(address, version=None, strict=False): + """Take an IP string/int and return an object of the correct type. + + Args: + address: A string or integer, the IP address. Either IPv4 or + IPv6 addresses may be supplied; integers less than 2**32 will + be considered to be IPv4 by default. + version: An Integer, if set, don't try to automatically + determine what the IP address type is. important for things + like IPNetwork(1), which could be IPv4, '0.0.0.1/32', or IPv6, + '::1/128'. + + Returns: + An IPv4Network or IPv6Network object. + + Raises: + ValueError: if the string passed isn't either a v4 or a v6 + address. Or if a strict network was requested and a strict + network wasn't given. + + """ + if version: + if version == 4: + return IPv4Network(address, strict) + elif version == 6: + return IPv6Network(address, strict) + + try: + return IPv4Network(address, strict) + except (AddressValueError, NetmaskValueError): + pass + + try: + return IPv6Network(address, strict) + except (AddressValueError, NetmaskValueError): + pass + + raise ValueError('%r does not appear to be an IPv4 or IPv6 network' % + address) + + +def v4_int_to_packed(address): + """The binary representation of this address. + + Args: + address: An integer representation of an IPv4 IP address. + + Returns: + The binary representation of this address. + + Raises: + ValueError: If the integer is too large to be an IPv4 IP + address. 
+ """ + if address > _BaseV4._ALL_ONES: + raise ValueError('Address too large for IPv4') + return Bytes(struct.pack('!I', address)) + + +def v6_int_to_packed(address): + """The binary representation of this address. + + Args: + address: An integer representation of an IPv4 IP address. + + Returns: + The binary representation of this address. + """ + return Bytes(struct.pack('!QQ', address >> 64, address & (2**64 - 1))) + + +def _find_address_range(addresses): + """Find a sequence of addresses. + + Args: + addresses: a list of IPv4 or IPv6 addresses. + + Returns: + A tuple containing the first and last IP addresses in the sequence. + + """ + first = last = addresses[0] + for ip in addresses[1:]: + if ip._ip == last._ip + 1: + last = ip + else: + break + return (first, last) + +def _get_prefix_length(number1, number2, bits): + """Get the number of leading bits that are same for two numbers. + + Args: + number1: an integer. + number2: another integer. + bits: the maximum number of bits to compare. + + Returns: + The number of leading bits that are the same for two numbers. + + """ + for i in range(bits): + if number1 >> i == number2 >> i: + return bits - i + return 0 + +def _count_righthand_zero_bits(number, bits): + """Count the number of zero bits on the right hand side. + + Args: + number: an integer. + bits: maximum number of bits to count. + + Returns: + The number of zero bits on the right hand side of the number. + + """ + if number == 0: + return bits + for i in range(bits): + if (number >> i) % 2: + return i + +def summarize_address_range(first, last): + """Summarize a network range given the first and last IP addresses. + + Example: + >>> summarize_address_range(IPv4Address('1.1.1.0'), + IPv4Address('1.1.1.130')) + [IPv4Network('1.1.1.0/25'), IPv4Network('1.1.1.128/31'), + IPv4Network('1.1.1.130/32')] + + Args: + first: the first IPv4Address or IPv6Address in the range. + last: the last IPv4Address or IPv6Address in the range. 
+ + Returns: + The address range collapsed to a list of IPv4Network's or + IPv6Network's. + + Raise: + TypeError: + If the first and last objects are not IP addresses. + If the first and last objects are not the same version. + ValueError: + If the last object is not greater than the first. + If the version is not 4 or 6. + + """ + if not (isinstance(first, _BaseIP) and isinstance(last, _BaseIP)): + raise TypeError('first and last must be IP addresses, not networks') + if first.version != last.version: + raise TypeError("%s and %s are not of the same version" % ( + str(first), str(last))) + if first > last: + raise ValueError('last IP address must be greater than first') + + networks = [] + + if first.version == 4: + ip = IPv4Network + elif first.version == 6: + ip = IPv6Network + else: + raise ValueError('unknown IP version') + + ip_bits = first._max_prefixlen + first_int = first._ip + last_int = last._ip + while first_int <= last_int: + nbits = _count_righthand_zero_bits(first_int, ip_bits) + current = None + while nbits >= 0: + addend = 2**nbits - 1 + current = first_int + addend + nbits -= 1 + if current <= last_int: + break + prefix = _get_prefix_length(first_int, current, ip_bits) + net = ip('%s/%d' % (str(first), prefix)) + networks.append(net) + if current == ip._ALL_ONES: + break + first_int = current + 1 + first = IPAddress(first_int, version=first._version) + return networks + +def _collapse_address_list_recursive(addresses): + """Loops through the addresses, collapsing concurrent netblocks. + + Example: + + ip1 = IPv4Network('1.1.0.0/24') + ip2 = IPv4Network('1.1.1.0/24') + ip3 = IPv4Network('1.1.2.0/24') + ip4 = IPv4Network('1.1.3.0/24') + ip5 = IPv4Network('1.1.4.0/24') + ip6 = IPv4Network('1.1.0.1/22') + + _collapse_address_list_recursive([ip1, ip2, ip3, ip4, ip5, ip6]) -> + [IPv4Network('1.1.0.0/22'), IPv4Network('1.1.4.0/24')] + + This shouldn't be called directly; it is called via + collapse_address_list([]). 
+ + Args: + addresses: A list of IPv4Network's or IPv6Network's + + Returns: + A list of IPv4Network's or IPv6Network's depending on what we were + passed. + + """ + ret_array = [] + optimized = False + + for cur_addr in addresses: + if not ret_array: + ret_array.append(cur_addr) + continue + if cur_addr in ret_array[-1]: + optimized = True + elif cur_addr == ret_array[-1].supernet().subnet()[1]: + ret_array.append(ret_array.pop().supernet()) + optimized = True + else: + ret_array.append(cur_addr) + + if optimized: + return _collapse_address_list_recursive(ret_array) + + return ret_array + + +def collapse_address_list(addresses): + """Collapse a list of IP objects. + + Example: + collapse_address_list([IPv4('1.1.0.0/24'), IPv4('1.1.1.0/24')]) -> + [IPv4('1.1.0.0/23')] + + Args: + addresses: A list of IPv4Network or IPv6Network objects. + + Returns: + A list of IPv4Network or IPv6Network objects depending on what we + were passed. + + Raises: + TypeError: If passed a list of mixed version objects. 
+ + """ + i = 0 + addrs = [] + ips = [] + nets = [] + + # split IP addresses and networks + for ip in addresses: + if isinstance(ip, _BaseIP): + if ips and ips[-1]._version != ip._version: + raise TypeError("%s and %s are not of the same version" % ( + str(ip), str(ips[-1]))) + ips.append(ip) + elif ip._prefixlen == ip._max_prefixlen: + if ips and ips[-1]._version != ip._version: + raise TypeError("%s and %s are not of the same version" % ( + str(ip), str(ips[-1]))) + ips.append(ip.ip) + else: + if nets and nets[-1]._version != ip._version: + raise TypeError("%s and %s are not of the same version" % ( + str(ip), str(ips[-1]))) + nets.append(ip) + + # sort and dedup + ips = sorted(set(ips)) + nets = sorted(set(nets)) + + while i < len(ips): + (first, last) = _find_address_range(ips[i:]) + i = ips.index(last) + 1 + addrs.extend(summarize_address_range(first, last)) + + return _collapse_address_list_recursive(sorted( + addrs + nets, key=_BaseNet._get_networks_key)) + +# backwards compatibility +CollapseAddrList = collapse_address_list + +# We need to distinguish between the string and packed-bytes representations +# of an IP address. For example, b'0::1' is the IPv4 address 48.58.58.49, +# while '0::1' is an IPv6 address. +# +# In Python 3, the native 'bytes' type already provides this functionality, +# so we use it directly. For earlier implementations where bytes is not a +# distinct type, we create a subclass of str to serve as a tag. +# +# Usage example (Python 2): +# ip = ipaddr.IPAddress(ipaddr.Bytes('xxxx')) +# +# Usage example (Python 3): +# ip = ipaddr.IPAddress(b'xxxx') +try: + if bytes is str: + raise TypeError("bytes is not a distinct type") + Bytes = bytes +except (NameError, TypeError): + class Bytes(str): + def __repr__(self): + return 'Bytes(%s)' % str.__repr__(self) + +def get_mixed_type_key(obj): + """Return a key suitable for sorting between networks and addresses. 
+ + Address and Network objects are not sortable by default; they're + fundamentally different so the expression + + IPv4Address('1.1.1.1') <= IPv4Network('1.1.1.1/24') + + doesn't make any sense. There are some times however, where you may wish + to have ipaddr sort these for you anyway. If you need to do this, you + can use this function as the key= argument to sorted(). + + Args: + obj: either a Network or Address object. + Returns: + appropriate key. + + """ + if isinstance(obj, _BaseNet): + return obj._get_networks_key() + elif isinstance(obj, _BaseIP): + return obj._get_address_key() + return NotImplemented + +class _IPAddrBase(object): + + """The mother class.""" + + def __index__(self): + return self._ip + + def __int__(self): + return self._ip + + def __hex__(self): + return hex(self._ip) + + @property + def exploded(self): + """Return the longhand version of the IP address as a string.""" + return self._explode_shorthand_ip_string() + + @property + def compressed(self): + """Return the shorthand version of the IP address as a string.""" + return str(self) + + +class _BaseIP(_IPAddrBase): + + """A generic IP object. + + This IP class contains the version independent methods which are + used by single IP addresses. 
+ + """ + + def __eq__(self, other): + try: + return (self._ip == other._ip + and self._version == other._version) + except AttributeError: + return NotImplemented + + def __ne__(self, other): + eq = self.__eq__(other) + if eq is NotImplemented: + return NotImplemented + return not eq + + def __le__(self, other): + gt = self.__gt__(other) + if gt is NotImplemented: + return NotImplemented + return not gt + + def __ge__(self, other): + lt = self.__lt__(other) + if lt is NotImplemented: + return NotImplemented + return not lt + + def __lt__(self, other): + if self._version != other._version: + raise TypeError('%s and %s are not of the same version' % ( + str(self), str(other))) + if not isinstance(other, _BaseIP): + raise TypeError('%s and %s are not of the same type' % ( + str(self), str(other))) + if self._ip != other._ip: + return self._ip < other._ip + return False + + def __gt__(self, other): + if self._version != other._version: + raise TypeError('%s and %s are not of the same version' % ( + str(self), str(other))) + if not isinstance(other, _BaseIP): + raise TypeError('%s and %s are not of the same type' % ( + str(self), str(other))) + if self._ip != other._ip: + return self._ip > other._ip + return False + + # Shorthand for Integer addition and subtraction. This is not + # meant to ever support addition/subtraction of addresses. 
+ def __add__(self, other): + if not isinstance(other, int): + return NotImplemented + return IPAddress(int(self) + other, version=self._version) + + def __sub__(self, other): + if not isinstance(other, int): + return NotImplemented + return IPAddress(int(self) - other, version=self._version) + + def __repr__(self): + return '%s(%r)' % (self.__class__.__name__, str(self)) + + def __str__(self): + return '%s' % self._string_from_ip_int(self._ip) + + def __hash__(self): + return hash(hex(long(self._ip))) + + def _get_address_key(self): + return (self._version, self) + + @property + def version(self): + raise NotImplementedError('BaseIP has no version') + + +class _BaseNet(_IPAddrBase): + + """A generic IP object. + + This IP class contains the version independent methods which are + used by networks. + + """ + + def __init__(self, address): + self._cache = {} + + def __repr__(self): + return '%s(%r)' % (self.__class__.__name__, str(self)) + + def iterhosts(self): + """Generate Iterator over usable hosts in a network. + + This is like __iter__ except it doesn't return the network + or broadcast addresses. 
+ + """ + cur = int(self.network) + 1 + bcast = int(self.broadcast) - 1 + while cur <= bcast: + cur += 1 + yield IPAddress(cur - 1, version=self._version) + + def __iter__(self): + cur = int(self.network) + bcast = int(self.broadcast) + while cur <= bcast: + cur += 1 + yield IPAddress(cur - 1, version=self._version) + + def __getitem__(self, n): + network = int(self.network) + broadcast = int(self.broadcast) + if n >= 0: + if network + n > broadcast: + raise IndexError + return IPAddress(network + n, version=self._version) + else: + n += 1 + if broadcast + n < network: + raise IndexError + return IPAddress(broadcast + n, version=self._version) + + def __lt__(self, other): + if self._version != other._version: + raise TypeError('%s and %s are not of the same version' % ( + str(self), str(other))) + if not isinstance(other, _BaseNet): + raise TypeError('%s and %s are not of the same type' % ( + str(self), str(other))) + if self.network != other.network: + return self.network < other.network + if self.netmask != other.netmask: + return self.netmask < other.netmask + return False + + def __gt__(self, other): + if self._version != other._version: + raise TypeError('%s and %s are not of the same version' % ( + str(self), str(other))) + if not isinstance(other, _BaseNet): + raise TypeError('%s and %s are not of the same type' % ( + str(self), str(other))) + if self.network != other.network: + return self.network > other.network + if self.netmask != other.netmask: + return self.netmask > other.netmask + return False + + def __le__(self, other): + gt = self.__gt__(other) + if gt is NotImplemented: + return NotImplemented + return not gt + + def __ge__(self, other): + lt = self.__lt__(other) + if lt is NotImplemented: + return NotImplemented + return not lt + + def __eq__(self, other): + try: + return (self._version == other._version + and self.network == other.network + and int(self.netmask) == int(other.netmask)) + except AttributeError: + if isinstance(other, _BaseIP): + 
return (self._version == other._version + and self._ip == other._ip) + + def __ne__(self, other): + eq = self.__eq__(other) + if eq is NotImplemented: + return NotImplemented + return not eq + + def __str__(self): + return '%s/%s' % (str(self.ip), + str(self._prefixlen)) + + def __hash__(self): + return hash(int(self.network) ^ int(self.netmask)) + + def __contains__(self, other): + # always false if one is v4 and the other is v6. + if self._version != other._version: + return False + # dealing with another network. + if isinstance(other, _BaseNet): + return (self.network <= other.network and + self.broadcast >= other.broadcast) + # dealing with another address + else: + return (int(self.network) <= int(other._ip) <= + int(self.broadcast)) + + def overlaps(self, other): + """Tell if self is partly contained in other.""" + return self.network in other or self.broadcast in other or ( + other.network in self or other.broadcast in self) + + @property + def network(self): + x = self._cache.get('network') + if x is None: + x = IPAddress(self._ip & int(self.netmask), version=self._version) + self._cache['network'] = x + return x + + @property + def broadcast(self): + x = self._cache.get('broadcast') + if x is None: + x = IPAddress(self._ip | int(self.hostmask), version=self._version) + self._cache['broadcast'] = x + return x + + @property + def hostmask(self): + x = self._cache.get('hostmask') + if x is None: + x = IPAddress(int(self.netmask) ^ self._ALL_ONES, + version=self._version) + self._cache['hostmask'] = x + return x + + @property + def with_prefixlen(self): + return '%s/%d' % (str(self.ip), self._prefixlen) + + @property + def with_netmask(self): + return '%s/%s' % (str(self.ip), str(self.netmask)) + + @property + def with_hostmask(self): + return '%s/%s' % (str(self.ip), str(self.hostmask)) + + @property + def numhosts(self): + """Number of hosts in the current subnet.""" + return int(self.broadcast) - int(self.network) + 1 + + @property + def version(self): + 
raise NotImplementedError('BaseNet has no version') + + @property + def prefixlen(self): + return self._prefixlen + + def address_exclude(self, other): + """Remove an address from a larger block. + + For example: + + addr1 = IPNetwork('10.1.1.0/24') + addr2 = IPNetwork('10.1.1.0/26') + addr1.address_exclude(addr2) = + [IPNetwork('10.1.1.64/26'), IPNetwork('10.1.1.128/25')] + + or IPv6: + + addr1 = IPNetwork('::1/32') + addr2 = IPNetwork('::1/128') + addr1.address_exclude(addr2) = [IPNetwork('::0/128'), + IPNetwork('::2/127'), + IPNetwork('::4/126'), + IPNetwork('::8/125'), + ... + IPNetwork('0:0:8000::/33')] + + Args: + other: An IPvXNetwork object of the same type. + + Returns: + A sorted list of IPvXNetwork objects addresses which is self + minus other. + + Raises: + TypeError: If self and other are of difffering address + versions, or if other is not a network object. + ValueError: If other is not completely contained by self. + + """ + if not self._version == other._version: + raise TypeError("%s and %s are not of the same version" % ( + str(self), str(other))) + + if not isinstance(other, _BaseNet): + raise TypeError("%s is not a network object" % str(other)) + + if other not in self: + raise ValueError('%s not contained in %s' % (str(other), + str(self))) + if other == self: + return [] + + ret_addrs = [] + + # Make sure we're comparing the network of other. + other = IPNetwork('%s/%s' % (str(other.network), str(other.prefixlen)), + version=other._version) + + s1, s2 = self.subnet() + while s1 != other and s2 != other: + if other in s1: + ret_addrs.append(s2) + s1, s2 = s1.subnet() + elif other in s2: + ret_addrs.append(s1) + s1, s2 = s2.subnet() + else: + # If we got here, there's a bug somewhere. + assert True == False, ('Error performing exclusion: ' + 's1: %s s2: %s other: %s' % + (str(s1), str(s2), str(other))) + if s1 == other: + ret_addrs.append(s2) + elif s2 == other: + ret_addrs.append(s1) + else: + # If we got here, there's a bug somewhere. 
+ assert True == False, ('Error performing exclusion: ' + 's1: %s s2: %s other: %s' % + (str(s1), str(s2), str(other))) + + return sorted(ret_addrs, key=_BaseNet._get_networks_key) + + def compare_networks(self, other): + """Compare two IP objects. + + This is only concerned about the comparison of the integer + representation of the network addresses. This means that the + host bits aren't considered at all in this method. If you want + to compare host bits, you can easily enough do a + 'HostA._ip < HostB._ip' + + Args: + other: An IP object. + + Returns: + If the IP versions of self and other are the same, returns: + + -1 if self < other: + eg: IPv4('1.1.1.0/24') < IPv4('1.1.2.0/24') + IPv6('1080::200C:417A') < IPv6('1080::200B:417B') + 0 if self == other + eg: IPv4('1.1.1.1/24') == IPv4('1.1.1.2/24') + IPv6('1080::200C:417A/96') == IPv6('1080::200C:417B/96') + 1 if self > other + eg: IPv4('1.1.1.0/24') > IPv4('1.1.0.0/24') + IPv6('1080::1:200C:417A/112') > + IPv6('1080::0:200C:417A/112') + + If the IP versions of self and other are different, returns: + + -1 if self._version < other._version + eg: IPv4('10.0.0.1/24') < IPv6('::1/128') + 1 if self._version > other._version + eg: IPv6('::1/128') > IPv4('255.255.255.0/24') + + """ + if self._version < other._version: + return -1 + if self._version > other._version: + return 1 + # self._version == other._version below here: + if self.network < other.network: + return -1 + if self.network > other.network: + return 1 + # self.network == other.network below here: + if self.netmask < other.netmask: + return -1 + if self.netmask > other.netmask: + return 1 + # self.network == other.network and self.netmask == other.netmask + return 0 + + def _get_networks_key(self): + """Network-only key function. + + Returns an object that identifies this address' network and + netmask. This function is a suitable "key" argument for sorted() + and list.sort(). 
+ + """ + return (self._version, self.network, self.netmask) + + def _ip_int_from_prefix(self, prefixlen=None): + """Turn the prefix length netmask into a int for comparison. + + Args: + prefixlen: An integer, the prefix length. + + Returns: + An integer. + + """ + if not prefixlen and prefixlen != 0: + prefixlen = self._prefixlen + return self._ALL_ONES ^ (self._ALL_ONES >> prefixlen) + + def _prefix_from_ip_int(self, ip_int, mask=32): + """Return prefix length from the decimal netmask. + + Args: + ip_int: An integer, the IP address. + mask: The netmask. Defaults to 32. + + Returns: + An integer, the prefix length. + + """ + while mask: + if ip_int & 1 == 1: + break + ip_int >>= 1 + mask -= 1 + + return mask + + def _ip_string_from_prefix(self, prefixlen=None): + """Turn a prefix length into a dotted decimal string. + + Args: + prefixlen: An integer, the netmask prefix length. + + Returns: + A string, the dotted decimal netmask string. + + """ + if not prefixlen: + prefixlen = self._prefixlen + return self._string_from_ip_int(self._ip_int_from_prefix(prefixlen)) + + def iter_subnets(self, prefixlen_diff=1, new_prefix=None): + """The subnets which join to make the current subnet. + + In the case that self contains only one IP + (self._prefixlen == 32 for IPv4 or self._prefixlen == 128 + for IPv6), return a list with just ourself. + + Args: + prefixlen_diff: An integer, the amount the prefix length + should be increased by. This should not be set if + new_prefix is also set. + new_prefix: The desired new prefix length. This must be a + larger number (smaller prefix) than the existing prefix. + This should not be set if prefixlen_diff is also set. + + Returns: + An iterator of IPv(4|6) objects. + + Raises: + ValueError: The prefixlen_diff is too small or too large. 
+ OR + prefixlen_diff and new_prefix are both set or new_prefix + is a smaller number than the current prefix (smaller + number means a larger network) + + """ + if self._prefixlen == self._max_prefixlen: + yield self + return + + if new_prefix is not None: + if new_prefix < self._prefixlen: + raise ValueError('new prefix must be longer') + if prefixlen_diff != 1: + raise ValueError('cannot set prefixlen_diff and new_prefix') + prefixlen_diff = new_prefix - self._prefixlen + + if prefixlen_diff < 0: + raise ValueError('prefix length diff must be > 0') + new_prefixlen = self._prefixlen + prefixlen_diff + + if not self._is_valid_netmask(str(new_prefixlen)): + raise ValueError( + 'prefix length diff %d is invalid for netblock %s' % ( + new_prefixlen, str(self))) + + first = IPNetwork('%s/%s' % (str(self.network), + str(self._prefixlen + prefixlen_diff)), + version=self._version) + + yield first + current = first + while True: + broadcast = current.broadcast + if broadcast == self.broadcast: + return + new_addr = IPAddress(int(broadcast) + 1, version=self._version) + current = IPNetwork('%s/%s' % (str(new_addr), str(new_prefixlen)), + version=self._version) + + yield current + + def masked(self): + """Return the network object with the host bits masked out.""" + return IPNetwork('%s/%d' % (self.network, self._prefixlen), + version=self._version) + + def subnet(self, prefixlen_diff=1, new_prefix=None): + """Return a list of subnets, rather than an iterator.""" + return list(self.iter_subnets(prefixlen_diff, new_prefix)) + + def supernet(self, prefixlen_diff=1, new_prefix=None): + """The supernet containing the current network. + + Args: + prefixlen_diff: An integer, the amount the prefix length of + the network should be decreased by. For example, given a + /24 network and a prefixlen_diff of 3, a supernet with a + /21 netmask is returned. + + Returns: + An IPv4 network object. + + Raises: + ValueError: If self.prefixlen - prefixlen_diff < 0. 
I.e., you have a + negative prefix length. + OR + If prefixlen_diff and new_prefix are both set or new_prefix is a + larger number than the current prefix (larger number means a + smaller network) + + """ + if self._prefixlen == 0: + return self + + if new_prefix is not None: + if new_prefix > self._prefixlen: + raise ValueError('new prefix must be shorter') + if prefixlen_diff != 1: + raise ValueError('cannot set prefixlen_diff and new_prefix') + prefixlen_diff = self._prefixlen - new_prefix + + + if self.prefixlen - prefixlen_diff < 0: + raise ValueError( + 'current prefixlen is %d, cannot have a prefixlen_diff of %d' % + (self.prefixlen, prefixlen_diff)) + return IPNetwork('%s/%s' % (str(self.network), + str(self.prefixlen - prefixlen_diff)), + version=self._version) + + # backwards compatibility + Subnet = subnet + Supernet = supernet + AddressExclude = address_exclude + CompareNetworks = compare_networks + Contains = __contains__ + + +class _BaseV4(object): + + """Base IPv4 object. + + The following methods are used by IPv4 objects in both single IP + addresses and networks. + + """ + + # Equivalent to 255.255.255.255 or 32 bits of 1's. + _ALL_ONES = (2**IPV4LENGTH) - 1 + _DECIMAL_DIGITS = frozenset('0123456789') + + def __init__(self, address): + self._version = 4 + self._max_prefixlen = IPV4LENGTH + + def _explode_shorthand_ip_string(self): + return str(self) + + def _ip_int_from_string(self, ip_str): + """Turn the given IP string into an integer for comparison. + + Args: + ip_str: A string, the IP ip_str. + + Returns: + The IP ip_str as an integer. + + Raises: + AddressValueError: if ip_str isn't a valid IPv4 Address. 
+ + """ + octets = ip_str.split('.') + if len(octets) != 4: + raise AddressValueError(ip_str) + + packed_ip = 0 + for oc in octets: + try: + packed_ip = (packed_ip << 8) | self._parse_octet(oc) + except ValueError: + raise AddressValueError(ip_str) + return packed_ip + + def _parse_octet(self, octet_str): + """Convert a decimal octet into an integer. + + Args: + octet_str: A string, the number to parse. + + Returns: + The octet as an integer. + + Raises: + ValueError: if the octet isn't strictly a decimal from [0..255]. + + """ + # Whitelist the characters, since int() allows a lot of bizarre stuff. + if not self._DECIMAL_DIGITS.issuperset(octet_str): + raise ValueError + octet_int = int(octet_str, 10) + # Disallow leading zeroes, because no clear standard exists on + # whether these should be interpreted as decimal or octal. + if octet_int > 255 or (octet_str[0] == '0' and len(octet_str) > 1): + raise ValueError + return octet_int + + def _string_from_ip_int(self, ip_int): + """Turns a 32-bit integer into dotted decimal notation. + + Args: + ip_int: An integer, the IP address. + + Returns: + The IP address as a string in dotted decimal notation. + + """ + octets = [] + for _ in xrange(4): + octets.insert(0, str(ip_int & 0xFF)) + ip_int >>= 8 + return '.'.join(octets) + + @property + def max_prefixlen(self): + return self._max_prefixlen + + @property + def packed(self): + """The binary representation of this address.""" + return v4_int_to_packed(self._ip) + + @property + def version(self): + return self._version + + @property + def is_reserved(self): + """Test if the address is otherwise IETF reserved. + + Returns: + A boolean, True if the address is within the + reserved IPv4 Network range. + + """ + return self in IPv4Network('240.0.0.0/4') + + @property + def is_private(self): + """Test if this address is allocated for private networks. + + Returns: + A boolean, True if the address is reserved per RFC 1918. 
+ + """ + return (self in IPv4Network('10.0.0.0/8') or + self in IPv4Network('172.16.0.0/12') or + self in IPv4Network('192.168.0.0/16')) + + @property + def is_multicast(self): + """Test if the address is reserved for multicast use. + + Returns: + A boolean, True if the address is multicast. + See RFC 3171 for details. + + """ + return self in IPv4Network('224.0.0.0/4') + + @property + def is_unspecified(self): + """Test if the address is unspecified. + + Returns: + A boolean, True if this is the unspecified address as defined in + RFC 5735 3. + + """ + return self in IPv4Network('0.0.0.0') + + @property + def is_loopback(self): + """Test if the address is a loopback address. + + Returns: + A boolean, True if the address is a loopback per RFC 3330. + + """ + return self in IPv4Network('127.0.0.0/8') + + @property + def is_link_local(self): + """Test if the address is reserved for link-local. + + Returns: + A boolean, True if the address is link-local per RFC 3927. + + """ + return self in IPv4Network('169.254.0.0/16') + + +class IPv4Address(_BaseV4, _BaseIP): + + """Represent and manipulate single IPv4 Addresses.""" + + def __init__(self, address): + + """ + Args: + address: A string or integer representing the IP + '192.168.1.1' + + Additionally, an integer can be passed, so + IPv4Address('192.168.1.1') == IPv4Address(3232235777). + or, more generally + IPv4Address(int(IPv4Address('192.168.1.1'))) == + IPv4Address('192.168.1.1') + + Raises: + AddressValueError: If ipaddr isn't a valid IPv4 address. + + """ + _BaseV4.__init__(self, address) + + # Efficient constructor from integer. + if isinstance(address, (int, long)): + self._ip = address + if address < 0 or address > self._ALL_ONES: + raise AddressValueError(address) + return + + # Constructing from a packed address + if isinstance(address, Bytes): + try: + self._ip, = struct.unpack('!I', address) + except struct.error: + raise AddressValueError(address) # Wrong length. 
+ return + + # Assume input argument to be string or any object representation + # which converts into a formatted IP string. + addr_str = str(address) + self._ip = self._ip_int_from_string(addr_str) + + +class IPv4Network(_BaseV4, _BaseNet): + + """This class represents and manipulates 32-bit IPv4 networks. + + Attributes: [examples for IPv4Network('1.2.3.4/27')] + ._ip: 16909060 + .ip: IPv4Address('1.2.3.4') + .network: IPv4Address('1.2.3.0') + .hostmask: IPv4Address('0.0.0.31') + .broadcast: IPv4Address('1.2.3.31') + .netmask: IPv4Address('255.255.255.224') + .prefixlen: 27 + + """ + + # the valid octets for host and netmasks. only useful for IPv4. + _valid_mask_octets = set((255, 254, 252, 248, 240, 224, 192, 128, 0)) + + def __init__(self, address, strict=False): + """Instantiate a new IPv4 network object. + + Args: + address: A string or integer representing the IP [& network]. + '192.168.1.1/24' + '192.168.1.1/255.255.255.0' + '192.168.1.1/0.0.0.255' + are all functionally the same in IPv4. Similarly, + '192.168.1.1' + '192.168.1.1/255.255.255.255' + '192.168.1.1/32' + are also functionaly equivalent. That is to say, failing to + provide a subnetmask will create an object with a mask of /32. + + If the mask (portion after the / in the argument) is given in + dotted quad form, it is treated as a netmask if it starts with a + non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it + starts with a zero field (e.g. 0.255.255.255 == /8), with the + single exception of an all-zero mask which is treated as a + netmask == /0. If no mask is given, a default of /32 is used. + + Additionally, an integer can be passed, so + IPv4Network('192.168.1.1') == IPv4Network(3232235777). + or, more generally + IPv4Network(int(IPv4Network('192.168.1.1'))) == + IPv4Network('192.168.1.1') + + strict: A boolean. If true, ensure that we have been passed + A true network address, eg, 192.168.1.0/24 and not an + IP address on a network, eg, 192.168.1.1/24. 
+ + Raises: + AddressValueError: If ipaddr isn't a valid IPv4 address. + NetmaskValueError: If the netmask isn't valid for + an IPv4 address. + ValueError: If strict was True and a network address was not + supplied. + + """ + _BaseNet.__init__(self, address) + _BaseV4.__init__(self, address) + + # Constructing from an integer or packed bytes. + if isinstance(address, (int, long, Bytes)): + self.ip = IPv4Address(address) + self._ip = self.ip._ip + self._prefixlen = self._max_prefixlen + self.netmask = IPv4Address(self._ALL_ONES) + return + + # Assume input argument to be string or any object representation + # which converts into a formatted IP prefix string. + addr = str(address).split('/') + + if len(addr) > 2: + raise AddressValueError(address) + + self._ip = self._ip_int_from_string(addr[0]) + self.ip = IPv4Address(self._ip) + + if len(addr) == 2: + mask = addr[1].split('.') + if len(mask) == 4: + # We have dotted decimal netmask. + if self._is_valid_netmask(addr[1]): + self.netmask = IPv4Address(self._ip_int_from_string( + addr[1])) + elif self._is_hostmask(addr[1]): + self.netmask = IPv4Address( + self._ip_int_from_string(addr[1]) ^ self._ALL_ONES) + else: + raise NetmaskValueError('%s is not a valid netmask' + % addr[1]) + + self._prefixlen = self._prefix_from_ip_int(int(self.netmask)) + else: + # We have a netmask in prefix length form. + if not self._is_valid_netmask(addr[1]): + raise NetmaskValueError(addr[1]) + self._prefixlen = int(addr[1]) + self.netmask = IPv4Address(self._ip_int_from_prefix( + self._prefixlen)) + else: + self._prefixlen = self._max_prefixlen + self.netmask = IPv4Address(self._ip_int_from_prefix( + self._prefixlen)) + if strict: + if self.ip != self.network: + raise ValueError('%s has host bits set' % + self.ip) + if self._prefixlen == (self._max_prefixlen - 1): + self.iterhosts = self.__iter__ + + def _is_hostmask(self, ip_str): + """Test if the IP string is a hostmask (rather than a netmask). 
+ + Args: + ip_str: A string, the potential hostmask. + + Returns: + A boolean, True if the IP string is a hostmask. + + """ + bits = ip_str.split('.') + try: + parts = [int(x) for x in bits if int(x) in self._valid_mask_octets] + except ValueError: + return False + if len(parts) != len(bits): + return False + if parts[0] < parts[-1]: + return True + return False + + def _is_valid_netmask(self, netmask): + """Verify that the netmask is valid. + + Args: + netmask: A string, either a prefix or dotted decimal + netmask. + + Returns: + A boolean, True if the prefix represents a valid IPv4 + netmask. + + """ + mask = netmask.split('.') + if len(mask) == 4: + if [x for x in mask if int(x) not in self._valid_mask_octets]: + return False + if [y for idx, y in enumerate(mask) if idx > 0 and + y > mask[idx - 1]]: + return False + return True + try: + netmask = int(netmask) + except ValueError: + return False + return 0 <= netmask <= self._max_prefixlen + + # backwards compatibility + IsRFC1918 = lambda self: self.is_private + IsMulticast = lambda self: self.is_multicast + IsLoopback = lambda self: self.is_loopback + IsLinkLocal = lambda self: self.is_link_local + + +class _BaseV6(object): + + """Base IPv6 object. + + The following methods are used by IPv6 objects in both single IP + addresses and networks. + + """ + + _ALL_ONES = (2**IPV6LENGTH) - 1 + _HEXTET_COUNT = 8 + _HEX_DIGITS = frozenset('0123456789ABCDEFabcdef') + + def __init__(self, address): + self._version = 6 + self._max_prefixlen = IPV6LENGTH + + def _ip_int_from_string(self, ip_str): + """Turn an IPv6 ip_str into an integer. + + Args: + ip_str: A string, the IPv6 ip_str. + + Returns: + A long, the IPv6 ip_str. + + Raises: + AddressValueError: if ip_str isn't a valid IPv6 Address. + + """ + parts = ip_str.split(':') + + # An IPv6 address needs at least 2 colons (3 parts). + if len(parts) < 3: + raise AddressValueError(ip_str) + + # If the address has an IPv4-style suffix, convert it to hexadecimal. + if '.' 
in parts[-1]: + ipv4_int = IPv4Address(parts.pop())._ip + parts.append('%x' % ((ipv4_int >> 16) & 0xFFFF)) + parts.append('%x' % (ipv4_int & 0xFFFF)) + + # An IPv6 address can't have more than 8 colons (9 parts). + if len(parts) > self._HEXTET_COUNT + 1: + raise AddressValueError(ip_str) + + # Disregarding the endpoints, find '::' with nothing in between. + # This indicates that a run of zeroes has been skipped. + try: + skip_index, = ( + [i for i in xrange(1, len(parts) - 1) if not parts[i]] or + [None]) + except ValueError: + # Can't have more than one '::' + raise AddressValueError(ip_str) + + # parts_hi is the number of parts to copy from above/before the '::' + # parts_lo is the number of parts to copy from below/after the '::' + if skip_index is not None: + # If we found a '::', then check if it also covers the endpoints. + parts_hi = skip_index + parts_lo = len(parts) - skip_index - 1 + if not parts[0]: + parts_hi -= 1 + if parts_hi: + raise AddressValueError(ip_str) # ^: requires ^:: + if not parts[-1]: + parts_lo -= 1 + if parts_lo: + raise AddressValueError(ip_str) # :$ requires ::$ + parts_skipped = self._HEXTET_COUNT - (parts_hi + parts_lo) + if parts_skipped < 1: + raise AddressValueError(ip_str) + else: + # Otherwise, allocate the entire address to parts_hi. The endpoints + # could still be empty, but _parse_hextet() will check for that. + if len(parts) != self._HEXTET_COUNT: + raise AddressValueError(ip_str) + parts_hi = len(parts) + parts_lo = 0 + parts_skipped = 0 + + try: + # Now, parse the hextets into a 128-bit integer. + ip_int = 0L + for i in xrange(parts_hi): + ip_int <<= 16 + ip_int |= self._parse_hextet(parts[i]) + ip_int <<= 16 * parts_skipped + for i in xrange(-parts_lo, 0): + ip_int <<= 16 + ip_int |= self._parse_hextet(parts[i]) + return ip_int + except ValueError: + raise AddressValueError(ip_str) + + def _parse_hextet(self, hextet_str): + """Convert an IPv6 hextet string into an integer. 
+ + Args: + hextet_str: A string, the number to parse. + + Returns: + The hextet as an integer. + + Raises: + ValueError: if the input isn't strictly a hex number from [0..FFFF]. + + """ + # Whitelist the characters, since int() allows a lot of bizarre stuff. + if not self._HEX_DIGITS.issuperset(hextet_str): + raise ValueError + hextet_int = int(hextet_str, 16) + if hextet_int > 0xFFFF: + raise ValueError + return hextet_int + + def _compress_hextets(self, hextets): + """Compresses a list of hextets. + + Compresses a list of strings, replacing the longest continuous + sequence of "0" in the list with "" and adding empty strings at + the beginning or at the end of the string such that subsequently + calling ":".join(hextets) will produce the compressed version of + the IPv6 address. + + Args: + hextets: A list of strings, the hextets to compress. + + Returns: + A list of strings. + + """ + best_doublecolon_start = -1 + best_doublecolon_len = 0 + doublecolon_start = -1 + doublecolon_len = 0 + for index in range(len(hextets)): + if hextets[index] == '0': + doublecolon_len += 1 + if doublecolon_start == -1: + # Start of a sequence of zeros. + doublecolon_start = index + if doublecolon_len > best_doublecolon_len: + # This is the longest sequence of zeros so far. + best_doublecolon_len = doublecolon_len + best_doublecolon_start = doublecolon_start + else: + doublecolon_len = 0 + doublecolon_start = -1 + + if best_doublecolon_len > 1: + best_doublecolon_end = (best_doublecolon_start + + best_doublecolon_len) + # For zeros at the end of the address. + if best_doublecolon_end == len(hextets): + hextets += [''] + hextets[best_doublecolon_start:best_doublecolon_end] = [''] + # For zeros at the beginning of the address. + if best_doublecolon_start == 0: + hextets = [''] + hextets + + return hextets + + def _string_from_ip_int(self, ip_int=None): + """Turns a 128-bit integer into hexadecimal notation. + + Args: + ip_int: An integer, the IP address. 
+ + Returns: + A string, the hexadecimal representation of the address. + + Raises: + ValueError: The address is bigger than 128 bits of all ones. + + """ + if not ip_int and ip_int != 0: + ip_int = int(self._ip) + + if ip_int > self._ALL_ONES: + raise ValueError('IPv6 address is too large') + + hex_str = '%032x' % ip_int + hextets = [] + for x in range(0, 32, 4): + hextets.append('%x' % int(hex_str[x:x+4], 16)) + + hextets = self._compress_hextets(hextets) + return ':'.join(hextets) + + def _explode_shorthand_ip_string(self): + """Expand a shortened IPv6 address. + + Args: + ip_str: A string, the IPv6 address. + + Returns: + A string, the expanded IPv6 address. + + """ + if isinstance(self, _BaseNet): + ip_str = str(self.ip) + else: + ip_str = str(self) + + ip_int = self._ip_int_from_string(ip_str) + parts = [] + for i in xrange(self._HEXTET_COUNT): + parts.append('%04x' % (ip_int & 0xFFFF)) + ip_int >>= 16 + parts.reverse() + if isinstance(self, _BaseNet): + return '%s/%d' % (':'.join(parts), self.prefixlen) + return ':'.join(parts) + + @property + def max_prefixlen(self): + return self._max_prefixlen + + @property + def packed(self): + """The binary representation of this address.""" + return v6_int_to_packed(self._ip) + + @property + def version(self): + return self._version + + @property + def is_multicast(self): + """Test if the address is reserved for multicast use. + + Returns: + A boolean, True if the address is a multicast address. + See RFC 2373 2.7 for details. + + """ + return self in IPv6Network('ff00::/8') + + @property + def is_reserved(self): + """Test if the address is otherwise IETF reserved. + + Returns: + A boolean, True if the address is within one of the + reserved IPv6 Network ranges. 
+ + """ + return (self in IPv6Network('::/8') or + self in IPv6Network('100::/8') or + self in IPv6Network('200::/7') or + self in IPv6Network('400::/6') or + self in IPv6Network('800::/5') or + self in IPv6Network('1000::/4') or + self in IPv6Network('4000::/3') or + self in IPv6Network('6000::/3') or + self in IPv6Network('8000::/3') or + self in IPv6Network('A000::/3') or + self in IPv6Network('C000::/3') or + self in IPv6Network('E000::/4') or + self in IPv6Network('F000::/5') or + self in IPv6Network('F800::/6') or + self in IPv6Network('FE00::/9')) + + @property + def is_unspecified(self): + """Test if the address is unspecified. + + Returns: + A boolean, True if this is the unspecified address as defined in + RFC 2373 2.5.2. + + """ + return self._ip == 0 and getattr(self, '_prefixlen', 128) == 128 + + @property + def is_loopback(self): + """Test if the address is a loopback address. + + Returns: + A boolean, True if the address is a loopback address as defined in + RFC 2373 2.5.3. + + """ + return self._ip == 1 and getattr(self, '_prefixlen', 128) == 128 + + @property + def is_link_local(self): + """Test if the address is reserved for link-local. + + Returns: + A boolean, True if the address is reserved per RFC 4291. + + """ + return self in IPv6Network('fe80::/10') + + @property + def is_site_local(self): + """Test if the address is reserved for site-local. + + Note that the site-local address space has been deprecated by RFC 3879. + Use is_private to test if this address is in the space of unique local + addresses as defined by RFC 4193. + + Returns: + A boolean, True if the address is reserved per RFC 3513 2.5.6. + + """ + return self in IPv6Network('fec0::/10') + + @property + def is_private(self): + """Test if this address is allocated for private networks. + + Returns: + A boolean, True if the address is reserved per RFC 4193. + + """ + return self in IPv6Network('fc00::/7') + + @property + def ipv4_mapped(self): + """Return the IPv4 mapped address. 
+ + Returns: + If the IPv6 address is a v4 mapped address, return the + IPv4 mapped address. Return None otherwise. + + """ + if (self._ip >> 32) != 0xFFFF: + return None + return IPv4Address(self._ip & 0xFFFFFFFF) + + @property + def teredo(self): + """Tuple of embedded teredo IPs. + + Returns: + Tuple of the (server, client) IPs or None if the address + doesn't appear to be a teredo address (doesn't start with + 2001::/32) + + """ + if (self._ip >> 96) != 0x20010000: + return None + return (IPv4Address((self._ip >> 64) & 0xFFFFFFFF), + IPv4Address(~self._ip & 0xFFFFFFFF)) + + @property + def sixtofour(self): + """Return the IPv4 6to4 embedded address. + + Returns: + The IPv4 6to4-embedded address if present or None if the + address doesn't appear to contain a 6to4 embedded address. + + """ + if (self._ip >> 112) != 0x2002: + return None + return IPv4Address((self._ip >> 80) & 0xFFFFFFFF) + + +class IPv6Address(_BaseV6, _BaseIP): + + """Represent and manipulate single IPv6 Addresses. + """ + + def __init__(self, address): + """Instantiate a new IPv6 address object. + + Args: + address: A string or integer representing the IP + + Additionally, an integer can be passed, so + IPv6Address('2001:4860::') == + IPv6Address(42541956101370907050197289607612071936L). + or, more generally + IPv6Address(IPv6Address('2001:4860::')._ip) == + IPv6Address('2001:4860::') + + Raises: + AddressValueError: If address isn't a valid IPv6 address. + + """ + _BaseV6.__init__(self, address) + + # Efficient constructor from integer. + if isinstance(address, (int, long)): + self._ip = address + if address < 0 or address > self._ALL_ONES: + raise AddressValueError(address) + return + + # Constructing from a packed address + if isinstance(address, Bytes): + try: + hi, lo = struct.unpack('!QQ', address) + except struct.error: + raise AddressValueError(address) # Wrong length. 
+ self._ip = (hi << 64) | lo + return + + # Assume input argument to be string or any object representation + # which converts into a formatted IP string. + addr_str = str(address) + if not addr_str: + raise AddressValueError('') + + self._ip = self._ip_int_from_string(addr_str) + + +class IPv6Network(_BaseV6, _BaseNet): + + """This class represents and manipulates 128-bit IPv6 networks. + + Attributes: [examples for IPv6('2001:658:22A:CAFE:200::1/64')] + .ip: IPv6Address('2001:658:22a:cafe:200::1') + .network: IPv6Address('2001:658:22a:cafe::') + .hostmask: IPv6Address('::ffff:ffff:ffff:ffff') + .broadcast: IPv6Address('2001:658:22a:cafe:ffff:ffff:ffff:ffff') + .netmask: IPv6Address('ffff:ffff:ffff:ffff::') + .prefixlen: 64 + + """ + + + def __init__(self, address, strict=False): + """Instantiate a new IPv6 Network object. + + Args: + address: A string or integer representing the IPv6 network or the IP + and prefix/netmask. + '2001:4860::/128' + '2001:4860:0000:0000:0000:0000:0000:0000/128' + '2001:4860::' + are all functionally the same in IPv6. That is to say, + failing to provide a subnetmask will create an object with + a mask of /128. + + Additionally, an integer can be passed, so + IPv6Network('2001:4860::') == + IPv6Network(42541956101370907050197289607612071936L). + or, more generally + IPv6Network(IPv6Network('2001:4860::')._ip) == + IPv6Network('2001:4860::') + + strict: A boolean. If true, ensure that we have been passed + A true network address, eg, 192.168.1.0/24 and not an + IP address on a network, eg, 192.168.1.1/24. + + Raises: + AddressValueError: If address isn't a valid IPv6 address. + NetmaskValueError: If the netmask isn't valid for + an IPv6 address. + ValueError: If strict was True and a network address was not + supplied. + + """ + _BaseNet.__init__(self, address) + _BaseV6.__init__(self, address) + + # Constructing from an integer or packed bytes. 
+ if isinstance(address, (int, long, Bytes)): + self.ip = IPv6Address(address) + self._ip = self.ip._ip + self._prefixlen = self._max_prefixlen + self.netmask = IPv6Address(self._ALL_ONES) + return + + # Assume input argument to be string or any object representation + # which converts into a formatted IP prefix string. + addr = str(address).split('/') + + if len(addr) > 2: + raise AddressValueError(address) + + self._ip = self._ip_int_from_string(addr[0]) + self.ip = IPv6Address(self._ip) + + if len(addr) == 2: + if self._is_valid_netmask(addr[1]): + self._prefixlen = int(addr[1]) + else: + raise NetmaskValueError(addr[1]) + else: + self._prefixlen = self._max_prefixlen + + self.netmask = IPv6Address(self._ip_int_from_prefix(self._prefixlen)) + + if strict: + if self.ip != self.network: + raise ValueError('%s has host bits set' % + self.ip) + if self._prefixlen == (self._max_prefixlen - 1): + self.iterhosts = self.__iter__ + + def _is_valid_netmask(self, prefixlen): + """Verify that the netmask/prefixlen is valid. + + Args: + prefixlen: A string, the netmask in prefix length format. + + Returns: + A boolean, True if the prefix represents a valid IPv6 + netmask. + + """ + try: + prefixlen = int(prefixlen) + except ValueError: + return False + return 0 <= prefixlen <= self._max_prefixlen + + @property + def with_netmask(self): + return self.with_prefixlen diff --git a/plugins/Sidebar/maxminddb/reader.py b/plugins/Sidebar/maxminddb/reader.py new file mode 100644 index 00000000..5ecfbdf2 --- /dev/null +++ b/plugins/Sidebar/maxminddb/reader.py @@ -0,0 +1,221 @@ +""" +maxminddb.reader +~~~~~~~~~~~~~~~~ + +This module contains the pure Python database reader and related classes. 
+ +""" +from __future__ import unicode_literals + +try: + import mmap +except ImportError: + # pylint: disable=invalid-name + mmap = None + +import struct + +from maxminddb.compat import byte_from_int, int_from_byte, ipaddress +from maxminddb.const import MODE_AUTO, MODE_MMAP, MODE_FILE, MODE_MEMORY +from maxminddb.decoder import Decoder +from maxminddb.errors import InvalidDatabaseError +from maxminddb.file import FileBuffer + + +class Reader(object): + + """ + Instances of this class provide a reader for the MaxMind DB format. IP + addresses can be looked up using the ``get`` method. + """ + + _DATA_SECTION_SEPARATOR_SIZE = 16 + _METADATA_START_MARKER = b"\xAB\xCD\xEFMaxMind.com" + + _ipv4_start = None + + def __init__(self, database, mode=MODE_AUTO): + """Reader for the MaxMind DB file format + + Arguments: + database -- A path to a valid MaxMind DB file such as a GeoIP2 + database file. + mode -- mode to open the database with. Valid mode are: + * MODE_MMAP - read from memory map. + * MODE_FILE - read database as standard file. + * MODE_MEMORY - load database into memory. + * MODE_AUTO - tries MODE_MMAP and then MODE_FILE. Default. + """ + if (mode == MODE_AUTO and mmap) or mode == MODE_MMAP: + with open(database, 'rb') as db_file: + self._buffer = mmap.mmap( + db_file.fileno(), 0, access=mmap.ACCESS_READ) + self._buffer_size = self._buffer.size() + elif mode in (MODE_AUTO, MODE_FILE): + self._buffer = FileBuffer(database) + self._buffer_size = self._buffer.size() + elif mode == MODE_MEMORY: + with open(database, 'rb') as db_file: + self._buffer = db_file.read() + self._buffer_size = len(self._buffer) + else: + raise ValueError('Unsupported open mode ({0}). 
Only MODE_AUTO, ' + ' MODE_FILE, and MODE_MEMORY are support by the pure Python ' + 'Reader'.format(mode)) + + metadata_start = self._buffer.rfind(self._METADATA_START_MARKER, + max(0, self._buffer_size + - 128 * 1024)) + + if metadata_start == -1: + self.close() + raise InvalidDatabaseError('Error opening database file ({0}). ' + 'Is this a valid MaxMind DB file?' + ''.format(database)) + + metadata_start += len(self._METADATA_START_MARKER) + metadata_decoder = Decoder(self._buffer, metadata_start) + (metadata, _) = metadata_decoder.decode(metadata_start) + self._metadata = Metadata( + **metadata) # pylint: disable=bad-option-value + + self._decoder = Decoder(self._buffer, self._metadata.search_tree_size + + self._DATA_SECTION_SEPARATOR_SIZE) + + def metadata(self): + """Return the metadata associated with the MaxMind DB file""" + return self._metadata + + def get(self, ip_address): + """Return the record for the ip_address in the MaxMind DB + + + Arguments: + ip_address -- an IP address in the standard string notation + """ + address = ipaddress.ip_address(ip_address) + + if address.version == 6 and self._metadata.ip_version == 4: + raise ValueError('Error looking up {0}. 
You attempted to look up ' + 'an IPv6 address in an IPv4-only database.'.format( + ip_address)) + pointer = self._find_address_in_tree(address) + + return self._resolve_data_pointer(pointer) if pointer else None + + def _find_address_in_tree(self, ip_address): + packed = ip_address.packed + + bit_count = len(packed) * 8 + node = self._start_node(bit_count) + + for i in range(bit_count): + if node >= self._metadata.node_count: + break + bit = 1 & (int_from_byte(packed[i >> 3]) >> 7 - (i % 8)) + node = self._read_node(node, bit) + if node == self._metadata.node_count: + # Record is empty + return 0 + elif node > self._metadata.node_count: + return node + + raise InvalidDatabaseError('Invalid node in search tree') + + def _start_node(self, length): + if self._metadata.ip_version != 6 or length == 128: + return 0 + + # We are looking up an IPv4 address in an IPv6 tree. Skip over the + # first 96 nodes. + if self._ipv4_start: + return self._ipv4_start + + node = 0 + for _ in range(96): + if node >= self._metadata.node_count: + break + node = self._read_node(node, 0) + self._ipv4_start = node + return node + + def _read_node(self, node_number, index): + base_offset = node_number * self._metadata.node_byte_size + + record_size = self._metadata.record_size + if record_size == 24: + offset = base_offset + index * 3 + node_bytes = b'\x00' + self._buffer[offset:offset + 3] + elif record_size == 28: + (middle,) = struct.unpack( + b'!B', self._buffer[base_offset + 3:base_offset + 4]) + if index: + middle &= 0x0F + else: + middle = (0xF0 & middle) >> 4 + offset = base_offset + index * 4 + node_bytes = byte_from_int( + middle) + self._buffer[offset:offset + 3] + elif record_size == 32: + offset = base_offset + index * 4 + node_bytes = self._buffer[offset:offset + 4] + else: + raise InvalidDatabaseError( + 'Unknown record size: {0}'.format(record_size)) + return struct.unpack(b'!I', node_bytes)[0] + + def _resolve_data_pointer(self, pointer): + resolved = pointer - 
self._metadata.node_count + \ + self._metadata.search_tree_size + + if resolved > self._buffer_size: + raise InvalidDatabaseError( + "The MaxMind DB file's search tree is corrupt") + + (data, _) = self._decoder.decode(resolved) + return data + + def close(self): + """Closes the MaxMind DB file and returns the resources to the system""" + # pylint: disable=unidiomatic-typecheck + if type(self._buffer) not in (str, bytes): + self._buffer.close() + + +class Metadata(object): + + """Metadata for the MaxMind DB reader""" + + # pylint: disable=too-many-instance-attributes + def __init__(self, **kwargs): + """Creates new Metadata object. kwargs are key/value pairs from spec""" + # Although I could just update __dict__, that is less obvious and it + # doesn't work well with static analysis tools and some IDEs + self.node_count = kwargs['node_count'] + self.record_size = kwargs['record_size'] + self.ip_version = kwargs['ip_version'] + self.database_type = kwargs['database_type'] + self.languages = kwargs['languages'] + self.binary_format_major_version = kwargs[ + 'binary_format_major_version'] + self.binary_format_minor_version = kwargs[ + 'binary_format_minor_version'] + self.build_epoch = kwargs['build_epoch'] + self.description = kwargs['description'] + + @property + def node_byte_size(self): + """The size of a node in bytes""" + return self.record_size // 4 + + @property + def search_tree_size(self): + """The size of the search tree""" + return self.node_count * self.node_byte_size + + def __repr__(self): + args = ', '.join('%s=%r' % x for x in self.__dict__.items()) + return '{module}.{class_name}({data})'.format( + module=self.__module__, + class_name=self.__class__.__name__, + data=args) diff --git a/plugins/Sidebar/media-globe/Detector.js b/plugins/Sidebar/media-globe/Detector.js new file mode 100644 index 00000000..1c074b83 --- /dev/null +++ b/plugins/Sidebar/media-globe/Detector.js @@ -0,0 +1,60 @@ +/** + * @author alteredq / http://alteredqualia.com/ + * @author 
mr.doob / http://mrdoob.com/ + */ + +Detector = { + + canvas : !! window.CanvasRenderingContext2D, + webgl : ( function () { try { return !! window.WebGLRenderingContext && !! document.createElement( 'canvas' ).getContext( 'experimental-webgl' ); } catch( e ) { return false; } } )(), + workers : !! window.Worker, + fileapi : window.File && window.FileReader && window.FileList && window.Blob, + + getWebGLErrorMessage : function () { + + var domElement = document.createElement( 'div' ); + + domElement.style.fontFamily = 'monospace'; + domElement.style.fontSize = '13px'; + domElement.style.textAlign = 'center'; + domElement.style.background = '#eee'; + domElement.style.color = '#000'; + domElement.style.padding = '1em'; + domElement.style.width = '475px'; + domElement.style.margin = '5em auto 0'; + + if ( ! this.webgl ) { + + domElement.innerHTML = window.WebGLRenderingContext ? [ + 'Sorry, your graphics card doesn\'t support WebGL' + ].join( '\n' ) : [ + 'Sorry, your browser doesn\'t support WebGL
    ', + 'Please try with', + 'Chrome, ', + 'Firefox 4 or', + 'Webkit Nightly (Mac)' + ].join( '\n' ); + + } + + return domElement; + + }, + + addGetWebGLMessage : function ( parameters ) { + + var parent, id, domElement; + + parameters = parameters || {}; + + parent = parameters.parent !== undefined ? parameters.parent : document.body; + id = parameters.id !== undefined ? parameters.id : 'oldie'; + + domElement = Detector.getWebGLErrorMessage(); + domElement.id = id; + + parent.appendChild( domElement ); + + } + +}; diff --git a/plugins/Sidebar/media-globe/Tween.js b/plugins/Sidebar/media-globe/Tween.js new file mode 100644 index 00000000..bdf141ad --- /dev/null +++ b/plugins/Sidebar/media-globe/Tween.js @@ -0,0 +1,12 @@ +// Tween.js - http://github.com/sole/tween.js +var TWEEN=TWEEN||function(){var a,e,c,d,f=[];return{start:function(g){c=setInterval(this.update,1E3/(g||60))},stop:function(){clearInterval(c)},add:function(g){f.push(g)},remove:function(g){a=f.indexOf(g);a!==-1&&f.splice(a,1)},update:function(){a=0;e=f.length;for(d=(new Date).getTime();a1?1:b;i=n(b);for(h in c)a[h]=e[h]+c[h]*i;l!==null&&l.call(a,i);if(b==1){m!==null&&m.call(a);k!==null&&k.start();return false}return true}};TWEEN.Easing={Linear:{},Quadratic:{},Cubic:{},Quartic:{},Quintic:{},Sinusoidal:{},Exponential:{},Circular:{},Elastic:{},Back:{},Bounce:{}};TWEEN.Easing.Linear.EaseNone=function(a){return a}; +TWEEN.Easing.Quadratic.EaseIn=function(a){return a*a};TWEEN.Easing.Quadratic.EaseOut=function(a){return-a*(a-2)};TWEEN.Easing.Quadratic.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a;return-0.5*(--a*(a-2)-1)};TWEEN.Easing.Cubic.EaseIn=function(a){return a*a*a};TWEEN.Easing.Cubic.EaseOut=function(a){return--a*a*a+1};TWEEN.Easing.Cubic.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a*a;return 0.5*((a-=2)*a*a+2)};TWEEN.Easing.Quartic.EaseIn=function(a){return a*a*a*a}; +TWEEN.Easing.Quartic.EaseOut=function(a){return-(--a*a*a*a-1)};TWEEN.Easing.Quartic.EaseInOut=function(a){if((a*=2)<1)return 
0.5*a*a*a*a;return-0.5*((a-=2)*a*a*a-2)};TWEEN.Easing.Quintic.EaseIn=function(a){return a*a*a*a*a};TWEEN.Easing.Quintic.EaseOut=function(a){return(a-=1)*a*a*a*a+1};TWEEN.Easing.Quintic.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a*a*a*a;return 0.5*((a-=2)*a*a*a*a+2)};TWEEN.Easing.Sinusoidal.EaseIn=function(a){return-Math.cos(a*Math.PI/2)+1}; +TWEEN.Easing.Sinusoidal.EaseOut=function(a){return Math.sin(a*Math.PI/2)};TWEEN.Easing.Sinusoidal.EaseInOut=function(a){return-0.5*(Math.cos(Math.PI*a)-1)};TWEEN.Easing.Exponential.EaseIn=function(a){return a==0?0:Math.pow(2,10*(a-1))};TWEEN.Easing.Exponential.EaseOut=function(a){return a==1?1:-Math.pow(2,-10*a)+1};TWEEN.Easing.Exponential.EaseInOut=function(a){if(a==0)return 0;if(a==1)return 1;if((a*=2)<1)return 0.5*Math.pow(2,10*(a-1));return 0.5*(-Math.pow(2,-10*(a-1))+2)}; +TWEEN.Easing.Circular.EaseIn=function(a){return-(Math.sqrt(1-a*a)-1)};TWEEN.Easing.Circular.EaseOut=function(a){return Math.sqrt(1- --a*a)};TWEEN.Easing.Circular.EaseInOut=function(a){if((a/=0.5)<1)return-0.5*(Math.sqrt(1-a*a)-1);return 0.5*(Math.sqrt(1-(a-=2)*a)+1)};TWEEN.Easing.Elastic.EaseIn=function(a){var e,c=0.1,d=0.4;if(a==0)return 0;if(a==1)return 1;d||(d=0.3);if(!c||c<1){c=1;e=d/4}else e=d/(2*Math.PI)*Math.asin(1/c);return-(c*Math.pow(2,10*(a-=1))*Math.sin((a-e)*2*Math.PI/d))}; +TWEEN.Easing.Elastic.EaseOut=function(a){var e,c=0.1,d=0.4;if(a==0)return 0;if(a==1)return 1;d||(d=0.3);if(!c||c<1){c=1;e=d/4}else e=d/(2*Math.PI)*Math.asin(1/c);return c*Math.pow(2,-10*a)*Math.sin((a-e)*2*Math.PI/d)+1}; +TWEEN.Easing.Elastic.EaseInOut=function(a){var e,c=0.1,d=0.4;if(a==0)return 0;if(a==1)return 1;d||(d=0.3);if(!c||c<1){c=1;e=d/4}else e=d/(2*Math.PI)*Math.asin(1/c);if((a*=2)<1)return-0.5*c*Math.pow(2,10*(a-=1))*Math.sin((a-e)*2*Math.PI/d);return c*Math.pow(2,-10*(a-=1))*Math.sin((a-e)*2*Math.PI/d)*0.5+1};TWEEN.Easing.Back.EaseIn=function(a){return a*a*(2.70158*a-1.70158)};TWEEN.Easing.Back.EaseOut=function(a){return(a-=1)*a*(2.70158*a+1.70158)+1}; 
+TWEEN.Easing.Back.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a*(3.5949095*a-2.5949095);return 0.5*((a-=2)*a*(3.5949095*a+2.5949095)+2)};TWEEN.Easing.Bounce.EaseIn=function(a){return 1-TWEEN.Easing.Bounce.EaseOut(1-a)};TWEEN.Easing.Bounce.EaseOut=function(a){return(a/=1)<1/2.75?7.5625*a*a:a<2/2.75?7.5625*(a-=1.5/2.75)*a+0.75:a<2.5/2.75?7.5625*(a-=2.25/2.75)*a+0.9375:7.5625*(a-=2.625/2.75)*a+0.984375}; +TWEEN.Easing.Bounce.EaseInOut=function(a){if(a<0.5)return TWEEN.Easing.Bounce.EaseIn(a*2)*0.5;return TWEEN.Easing.Bounce.EaseOut(a*2-1)*0.5+0.5}; diff --git a/plugins/Sidebar/media-globe/all.js b/plugins/Sidebar/media-globe/all.js new file mode 100644 index 00000000..8b6e5b6b --- /dev/null +++ b/plugins/Sidebar/media-globe/all.js @@ -0,0 +1,1333 @@ + + +/* ---- plugins/Sidebar/media-globe/Detector.js ---- */ + + +/** + * @author alteredq / http://alteredqualia.com/ + * @author mr.doob / http://mrdoob.com/ + */ + +Detector = { + + canvas : !! window.CanvasRenderingContext2D, + webgl : ( function () { try { return !! window.WebGLRenderingContext && !! document.createElement( 'canvas' ).getContext( 'experimental-webgl' ); } catch( e ) { return false; } } )(), + workers : !! window.Worker, + fileapi : window.File && window.FileReader && window.FileList && window.Blob, + + getWebGLErrorMessage : function () { + + var domElement = document.createElement( 'div' ); + + domElement.style.fontFamily = 'monospace'; + domElement.style.fontSize = '13px'; + domElement.style.textAlign = 'center'; + domElement.style.background = '#eee'; + domElement.style.color = '#000'; + domElement.style.padding = '1em'; + domElement.style.width = '475px'; + domElement.style.margin = '5em auto 0'; + + if ( ! this.webgl ) { + + domElement.innerHTML = window.WebGLRenderingContext ? [ + 'Sorry, your graphics card doesn\'t support WebGL' + ].join( '\n' ) : [ + 'Sorry, your browser doesn\'t support WebGL
    ', + 'Please try with', + 'Chrome, ', + 'Firefox 4 or', + 'Webkit Nightly (Mac)' + ].join( '\n' ); + + } + + return domElement; + + }, + + addGetWebGLMessage : function ( parameters ) { + + var parent, id, domElement; + + parameters = parameters || {}; + + parent = parameters.parent !== undefined ? parameters.parent : document.body; + id = parameters.id !== undefined ? parameters.id : 'oldie'; + + domElement = Detector.getWebGLErrorMessage(); + domElement.id = id; + + parent.appendChild( domElement ); + + } + +}; + + + +/* ---- plugins/Sidebar/media-globe/Tween.js ---- */ + + +// Tween.js - http://github.com/sole/tween.js +var TWEEN=TWEEN||function(){var a,e,c,d,f=[];return{start:function(g){c=setInterval(this.update,1E3/(g||60))},stop:function(){clearInterval(c)},add:function(g){f.push(g)},remove:function(g){a=f.indexOf(g);a!==-1&&f.splice(a,1)},update:function(){a=0;e=f.length;for(d=(new Date).getTime();a1?1:b;i=n(b);for(h in c)a[h]=e[h]+c[h]*i;l!==null&&l.call(a,i);if(b==1){m!==null&&m.call(a);k!==null&&k.start();return false}return true}};TWEEN.Easing={Linear:{},Quadratic:{},Cubic:{},Quartic:{},Quintic:{},Sinusoidal:{},Exponential:{},Circular:{},Elastic:{},Back:{},Bounce:{}};TWEEN.Easing.Linear.EaseNone=function(a){return a}; +TWEEN.Easing.Quadratic.EaseIn=function(a){return a*a};TWEEN.Easing.Quadratic.EaseOut=function(a){return-a*(a-2)};TWEEN.Easing.Quadratic.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a;return-0.5*(--a*(a-2)-1)};TWEEN.Easing.Cubic.EaseIn=function(a){return a*a*a};TWEEN.Easing.Cubic.EaseOut=function(a){return--a*a*a+1};TWEEN.Easing.Cubic.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a*a;return 0.5*((a-=2)*a*a+2)};TWEEN.Easing.Quartic.EaseIn=function(a){return a*a*a*a}; +TWEEN.Easing.Quartic.EaseOut=function(a){return-(--a*a*a*a-1)};TWEEN.Easing.Quartic.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a*a*a;return-0.5*((a-=2)*a*a*a-2)};TWEEN.Easing.Quintic.EaseIn=function(a){return 
a*a*a*a*a};TWEEN.Easing.Quintic.EaseOut=function(a){return(a-=1)*a*a*a*a+1};TWEEN.Easing.Quintic.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a*a*a*a;return 0.5*((a-=2)*a*a*a*a+2)};TWEEN.Easing.Sinusoidal.EaseIn=function(a){return-Math.cos(a*Math.PI/2)+1}; +TWEEN.Easing.Sinusoidal.EaseOut=function(a){return Math.sin(a*Math.PI/2)};TWEEN.Easing.Sinusoidal.EaseInOut=function(a){return-0.5*(Math.cos(Math.PI*a)-1)};TWEEN.Easing.Exponential.EaseIn=function(a){return a==0?0:Math.pow(2,10*(a-1))};TWEEN.Easing.Exponential.EaseOut=function(a){return a==1?1:-Math.pow(2,-10*a)+1};TWEEN.Easing.Exponential.EaseInOut=function(a){if(a==0)return 0;if(a==1)return 1;if((a*=2)<1)return 0.5*Math.pow(2,10*(a-1));return 0.5*(-Math.pow(2,-10*(a-1))+2)}; +TWEEN.Easing.Circular.EaseIn=function(a){return-(Math.sqrt(1-a*a)-1)};TWEEN.Easing.Circular.EaseOut=function(a){return Math.sqrt(1- --a*a)};TWEEN.Easing.Circular.EaseInOut=function(a){if((a/=0.5)<1)return-0.5*(Math.sqrt(1-a*a)-1);return 0.5*(Math.sqrt(1-(a-=2)*a)+1)};TWEEN.Easing.Elastic.EaseIn=function(a){var e,c=0.1,d=0.4;if(a==0)return 0;if(a==1)return 1;d||(d=0.3);if(!c||c<1){c=1;e=d/4}else e=d/(2*Math.PI)*Math.asin(1/c);return-(c*Math.pow(2,10*(a-=1))*Math.sin((a-e)*2*Math.PI/d))}; +TWEEN.Easing.Elastic.EaseOut=function(a){var e,c=0.1,d=0.4;if(a==0)return 0;if(a==1)return 1;d||(d=0.3);if(!c||c<1){c=1;e=d/4}else e=d/(2*Math.PI)*Math.asin(1/c);return c*Math.pow(2,-10*a)*Math.sin((a-e)*2*Math.PI/d)+1}; +TWEEN.Easing.Elastic.EaseInOut=function(a){var e,c=0.1,d=0.4;if(a==0)return 0;if(a==1)return 1;d||(d=0.3);if(!c||c<1){c=1;e=d/4}else e=d/(2*Math.PI)*Math.asin(1/c);if((a*=2)<1)return-0.5*c*Math.pow(2,10*(a-=1))*Math.sin((a-e)*2*Math.PI/d);return c*Math.pow(2,-10*(a-=1))*Math.sin((a-e)*2*Math.PI/d)*0.5+1};TWEEN.Easing.Back.EaseIn=function(a){return a*a*(2.70158*a-1.70158)};TWEEN.Easing.Back.EaseOut=function(a){return(a-=1)*a*(2.70158*a+1.70158)+1}; +TWEEN.Easing.Back.EaseInOut=function(a){if((a*=2)<1)return 
0.5*a*a*(3.5949095*a-2.5949095);return 0.5*((a-=2)*a*(3.5949095*a+2.5949095)+2)};TWEEN.Easing.Bounce.EaseIn=function(a){return 1-TWEEN.Easing.Bounce.EaseOut(1-a)};TWEEN.Easing.Bounce.EaseOut=function(a){return(a/=1)<1/2.75?7.5625*a*a:a<2/2.75?7.5625*(a-=1.5/2.75)*a+0.75:a<2.5/2.75?7.5625*(a-=2.25/2.75)*a+0.9375:7.5625*(a-=2.625/2.75)*a+0.984375}; +TWEEN.Easing.Bounce.EaseInOut=function(a){if(a<0.5)return TWEEN.Easing.Bounce.EaseIn(a*2)*0.5;return TWEEN.Easing.Bounce.EaseOut(a*2-1)*0.5+0.5}; + + + +/* ---- plugins/Sidebar/media-globe/globe.js ---- */ + + +/** + * dat.globe Javascript WebGL Globe Toolkit + * http://dataarts.github.com/dat.globe + * + * Copyright 2011 Data Arts Team, Google Creative Lab + * + * Licensed under the Apache License, Version 2.0 (the 'License'); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ + +var DAT = DAT || {}; + +DAT.Globe = function(container, opts) { + opts = opts || {}; + + var colorFn = opts.colorFn || function(x) { + var c = new THREE.Color(); + c.setHSL( ( 0.5 - (x * 2) ), Math.max(0.8, 1.0 - (x * 3)), 0.5 ); + return c; + }; + var imgDir = opts.imgDir || '/globe/'; + + var Shaders = { + 'earth' : { + uniforms: { + 'texture': { type: 't', value: null } + }, + vertexShader: [ + 'varying vec3 vNormal;', + 'varying vec2 vUv;', + 'void main() {', + 'gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );', + 'vNormal = normalize( normalMatrix * normal );', + 'vUv = uv;', + '}' + ].join('\n'), + fragmentShader: [ + 'uniform sampler2D texture;', + 'varying vec3 vNormal;', + 'varying vec2 vUv;', + 'void main() {', + 'vec3 diffuse = texture2D( texture, vUv ).xyz;', + 'float intensity = 1.05 - dot( vNormal, vec3( 0.0, 0.0, 1.0 ) );', + 'vec3 atmosphere = vec3( 1.0, 1.0, 1.0 ) * pow( intensity, 3.0 );', + 'gl_FragColor = vec4( diffuse + atmosphere, 1.0 );', + '}' + ].join('\n') + }, + 'atmosphere' : { 
+ uniforms: {}, + vertexShader: [ + 'varying vec3 vNormal;', + 'void main() {', + 'vNormal = normalize( normalMatrix * normal );', + 'gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );', + '}' + ].join('\n'), + fragmentShader: [ + 'varying vec3 vNormal;', + 'void main() {', + 'float intensity = pow( 0.8 - dot( vNormal, vec3( 0, 0, 1.0 ) ), 12.0 );', + 'gl_FragColor = vec4( 1.0, 1.0, 1.0, 1.0 ) * intensity;', + '}' + ].join('\n') + } + }; + + var camera, scene, renderer, w, h; + var mesh, atmosphere, point, running; + + var overRenderer; + var running = true; + + var curZoomSpeed = 0; + var zoomSpeed = 50; + + var mouse = { x: 0, y: 0 }, mouseOnDown = { x: 0, y: 0 }; + var rotation = { x: 0, y: 0 }, + target = { x: Math.PI*3/2, y: Math.PI / 6.0 }, + targetOnDown = { x: 0, y: 0 }; + + var distance = 100000, distanceTarget = 100000; + var padding = 10; + var PI_HALF = Math.PI / 2; + + function init() { + + container.style.color = '#fff'; + container.style.font = '13px/20px Arial, sans-serif'; + + var shader, uniforms, material; + w = container.offsetWidth || window.innerWidth; + h = container.offsetHeight || window.innerHeight; + + camera = new THREE.PerspectiveCamera(30, w / h, 1, 10000); + camera.position.z = distance; + + scene = new THREE.Scene(); + + var geometry = new THREE.SphereGeometry(200, 40, 30); + + shader = Shaders['earth']; + uniforms = THREE.UniformsUtils.clone(shader.uniforms); + + uniforms['texture'].value = THREE.ImageUtils.loadTexture(imgDir+'world.jpg'); + + material = new THREE.ShaderMaterial({ + + uniforms: uniforms, + vertexShader: shader.vertexShader, + fragmentShader: shader.fragmentShader + + }); + + mesh = new THREE.Mesh(geometry, material); + mesh.rotation.y = Math.PI; + scene.add(mesh); + + shader = Shaders['atmosphere']; + uniforms = THREE.UniformsUtils.clone(shader.uniforms); + + material = new THREE.ShaderMaterial({ + + uniforms: uniforms, + vertexShader: shader.vertexShader, + fragmentShader: 
shader.fragmentShader, + side: THREE.BackSide, + blending: THREE.AdditiveBlending, + transparent: true + + }); + + mesh = new THREE.Mesh(geometry, material); + mesh.scale.set( 1.1, 1.1, 1.1 ); + scene.add(mesh); + + geometry = new THREE.BoxGeometry(2.75, 2.75, 1); + geometry.applyMatrix(new THREE.Matrix4().makeTranslation(0,0,-0.5)); + + point = new THREE.Mesh(geometry); + + renderer = new THREE.WebGLRenderer({antialias: true}); + renderer.setSize(w, h); + renderer.setClearColor( 0x212121, 1 ); + + renderer.domElement.style.position = 'relative'; + + container.appendChild(renderer.domElement); + + container.addEventListener('mousedown', onMouseDown, false); + + container.addEventListener('mousewheel', onMouseWheel, false); + + document.addEventListener('keydown', onDocumentKeyDown, false); + + window.addEventListener('resize', onWindowResize, false); + + container.addEventListener('mouseover', function() { + overRenderer = true; + }, false); + + container.addEventListener('mouseout', function() { + overRenderer = false; + }, false); + } + + function addData(data, opts) { + var lat, lng, size, color, i, step, colorFnWrapper; + + opts.animated = opts.animated || false; + this.is_animated = opts.animated; + opts.format = opts.format || 'magnitude'; // other option is 'legend' + if (opts.format === 'magnitude') { + step = 3; + colorFnWrapper = function(data, i) { return colorFn(data[i+2]); } + } else if (opts.format === 'legend') { + step = 4; + colorFnWrapper = function(data, i) { return colorFn(data[i+3]); } + } else if (opts.format === 'peer') { + colorFnWrapper = function(data, i) { return colorFn(data[i+2]); } + } else { + throw('error: format not supported: '+opts.format); + } + + if (opts.animated) { + if (this._baseGeometry === undefined) { + this._baseGeometry = new THREE.Geometry(); + for (i = 0; i < data.length; i += step) { + lat = data[i]; + lng = data[i + 1]; +// size = data[i + 2]; + color = colorFnWrapper(data,i); + size = 0; + addPoint(lat, lng, size, 
color, this._baseGeometry); + } + } + if(this._morphTargetId === undefined) { + this._morphTargetId = 0; + } else { + this._morphTargetId += 1; + } + opts.name = opts.name || 'morphTarget'+this._morphTargetId; + } + var subgeo = new THREE.Geometry(); + for (i = 0; i < data.length; i += step) { + lat = data[i]; + lng = data[i + 1]; + color = colorFnWrapper(data,i); + size = data[i + 2]; + size = size*200; + addPoint(lat, lng, size, color, subgeo); + } + if (opts.animated) { + this._baseGeometry.morphTargets.push({'name': opts.name, vertices: subgeo.vertices}); + } else { + this._baseGeometry = subgeo; + } + + }; + + function createPoints() { + if (this._baseGeometry !== undefined) { + if (this.is_animated === false) { + this.points = new THREE.Mesh(this._baseGeometry, new THREE.MeshBasicMaterial({ + color: 0xffffff, + vertexColors: THREE.FaceColors, + morphTargets: false + })); + } else { + if (this._baseGeometry.morphTargets.length < 8) { + console.log('t l',this._baseGeometry.morphTargets.length); + var padding = 8-this._baseGeometry.morphTargets.length; + console.log('padding', padding); + for(var i=0; i<=padding; i++) { + console.log('padding',i); + this._baseGeometry.morphTargets.push({'name': 'morphPadding'+i, vertices: this._baseGeometry.vertices}); + } + } + this.points = new THREE.Mesh(this._baseGeometry, new THREE.MeshBasicMaterial({ + color: 0xffffff, + vertexColors: THREE.FaceColors, + morphTargets: true + })); + } + scene.add(this.points); + } + } + + function addPoint(lat, lng, size, color, subgeo) { + + var phi = (90 - lat) * Math.PI / 180; + var theta = (180 - lng) * Math.PI / 180; + + point.position.x = 200 * Math.sin(phi) * Math.cos(theta); + point.position.y = 200 * Math.cos(phi); + point.position.z = 200 * Math.sin(phi) * Math.sin(theta); + + point.lookAt(mesh.position); + + point.scale.z = Math.max( size, 0.1 ); // avoid non-invertible matrix + point.updateMatrix(); + + for (var i = 0; i < point.geometry.faces.length; i++) { + + 
point.geometry.faces[i].color = color; + + } + if(point.matrixAutoUpdate){ + point.updateMatrix(); + } + subgeo.merge(point.geometry, point.matrix); + } + + function onMouseDown(event) { + event.preventDefault(); + + container.addEventListener('mousemove', onMouseMove, false); + container.addEventListener('mouseup', onMouseUp, false); + container.addEventListener('mouseout', onMouseOut, false); + + mouseOnDown.x = - event.clientX; + mouseOnDown.y = event.clientY; + + targetOnDown.x = target.x; + targetOnDown.y = target.y; + + container.style.cursor = 'move'; + } + + function onMouseMove(event) { + mouse.x = - event.clientX; + mouse.y = event.clientY; + + var zoomDamp = distance/1000; + + target.x = targetOnDown.x + (mouse.x - mouseOnDown.x) * 0.005 * zoomDamp; + target.y = targetOnDown.y + (mouse.y - mouseOnDown.y) * 0.005 * zoomDamp; + + target.y = target.y > PI_HALF ? PI_HALF : target.y; + target.y = target.y < - PI_HALF ? - PI_HALF : target.y; + } + + function onMouseUp(event) { + container.removeEventListener('mousemove', onMouseMove, false); + container.removeEventListener('mouseup', onMouseUp, false); + container.removeEventListener('mouseout', onMouseOut, false); + container.style.cursor = 'auto'; + } + + function onMouseOut(event) { + container.removeEventListener('mousemove', onMouseMove, false); + container.removeEventListener('mouseup', onMouseUp, false); + container.removeEventListener('mouseout', onMouseOut, false); + } + + function onMouseWheel(event) { + event.preventDefault(); + if (overRenderer) { + zoom(event.wheelDeltaY * 0.3); + } + return false; + } + + function onDocumentKeyDown(event) { + switch (event.keyCode) { + case 38: + zoom(100); + event.preventDefault(); + break; + case 40: + zoom(-100); + event.preventDefault(); + break; + } + } + + function onWindowResize( event ) { + camera.aspect = container.offsetWidth / container.offsetHeight; + camera.updateProjectionMatrix(); + renderer.setSize( container.offsetWidth, container.offsetHeight ); 
+ } + + function zoom(delta) { + distanceTarget -= delta; + distanceTarget = distanceTarget > 855 ? 855 : distanceTarget; + distanceTarget = distanceTarget < 350 ? 350 : distanceTarget; + } + + function animate() { + if (!running) return + requestAnimationFrame(animate); + render(); + } + + function render() { + zoom(curZoomSpeed); + + rotation.x += (target.x - rotation.x) * 0.1; + rotation.y += (target.y - rotation.y) * 0.1; + distance += (distanceTarget - distance) * 0.3; + + camera.position.x = distance * Math.sin(rotation.x) * Math.cos(rotation.y); + camera.position.y = distance * Math.sin(rotation.y); + camera.position.z = distance * Math.cos(rotation.x) * Math.cos(rotation.y); + + camera.lookAt(mesh.position); + + renderer.render(scene, camera); + } + + function unload() { + running = false + container.removeEventListener('mousedown', onMouseDown, false); + container.removeEventListener('mousewheel', onMouseWheel, false); + document.removeEventListener('keydown', onDocumentKeyDown, false); + window.removeEventListener('resize', onWindowResize, false); + + } + + init(); + this.animate = animate; + this.unload = unload; + + + this.__defineGetter__('time', function() { + return this._time || 0; + }); + + this.__defineSetter__('time', function(t) { + var validMorphs = []; + var morphDict = this.points.morphTargetDictionary; + for(var k in morphDict) { + if(k.indexOf('morphPadding') < 0) { + validMorphs.push(morphDict[k]); + } + } + validMorphs.sort(); + var l = validMorphs.length-1; + var scaledt = t*l+1; + var index = Math.floor(scaledt); + for (i=0;i= 0) { + this.points.morphTargetInfluences[lastIndex] = 1 - leftover; + } + this.points.morphTargetInfluences[index] = leftover; + this._time = t; + }); + + this.addData = addData; + this.createPoints = createPoints; + this.renderer = renderer; + this.scene = scene; + + return this; + +}; + + + + +/* ---- plugins/Sidebar/media-globe/three.min.js ---- */ + + +// threejs.org/license +'use strict';var 
THREE={REVISION:"69"};"object"===typeof module&&(module.exports=THREE);void 0===Math.sign&&(Math.sign=function(a){return 0>a?-1:0>16&255)/255;this.g=(a>>8&255)/255;this.b=(a&255)/255;return this},setRGB:function(a,b,c){this.r=a;this.g=b;this.b=c;return this},setHSL:function(a,b,c){if(0===b)this.r=this.g=this.b=c;else{var d=function(a,b,c){0>c&&(c+=1);1c?b:c<2/3?a+6*(b-a)*(2/3-c):a};b=.5>=c?c*(1+b):c+b-c*b;c=2*c-b;this.r=d(c,b,a+1/3);this.g=d(c,b,a);this.b=d(c,b,a-1/3)}return this},setStyle:function(a){if(/^rgb\((\d+), ?(\d+), ?(\d+)\)$/i.test(a))return a=/^rgb\((\d+), ?(\d+), ?(\d+)\)$/i.exec(a),this.r=Math.min(255,parseInt(a[1],10))/255,this.g=Math.min(255,parseInt(a[2],10))/255,this.b=Math.min(255,parseInt(a[3],10))/255,this;if(/^rgb\((\d+)\%, ?(\d+)\%, ?(\d+)\%\)$/i.test(a))return a=/^rgb\((\d+)\%, ?(\d+)\%, ?(\d+)\%\)$/i.exec(a),this.r= +Math.min(100,parseInt(a[1],10))/100,this.g=Math.min(100,parseInt(a[2],10))/100,this.b=Math.min(100,parseInt(a[3],10))/100,this;if(/^\#([0-9a-f]{6})$/i.test(a))return a=/^\#([0-9a-f]{6})$/i.exec(a),this.setHex(parseInt(a[1],16)),this;if(/^\#([0-9a-f])([0-9a-f])([0-9a-f])$/i.test(a))return a=/^\#([0-9a-f])([0-9a-f])([0-9a-f])$/i.exec(a),this.setHex(parseInt(a[1]+a[1]+a[2]+a[2]+a[3]+a[3],16)),this;if(/^(\w+)$/i.test(a))return this.setHex(THREE.ColorKeywords[a]),this},copy:function(a){this.r=a.r;this.g= +a.g;this.b=a.b;return this},copyGammaToLinear:function(a){this.r=a.r*a.r;this.g=a.g*a.g;this.b=a.b*a.b;return this},copyLinearToGamma:function(a){this.r=Math.sqrt(a.r);this.g=Math.sqrt(a.g);this.b=Math.sqrt(a.b);return this},convertGammaToLinear:function(){var a=this.r,b=this.g,c=this.b;this.r=a*a;this.g=b*b;this.b=c*c;return this},convertLinearToGamma:function(){this.r=Math.sqrt(this.r);this.g=Math.sqrt(this.g);this.b=Math.sqrt(this.b);return this},getHex:function(){return 255*this.r<<16^255*this.g<< 
+8^255*this.b<<0},getHexString:function(){return("000000"+this.getHex().toString(16)).slice(-6)},getHSL:function(a){a=a||{h:0,s:0,l:0};var b=this.r,c=this.g,d=this.b,e=Math.max(b,c,d),f=Math.min(b,c,d),g,h=(f+e)/2;if(f===e)f=g=0;else{var k=e-f,f=.5>=h?k/(e+f):k/(2-e-f);switch(e){case b:g=(c-d)/k+(cf&&c>b?(c=2*Math.sqrt(1+c-f-b),this._w=(k-g)/c,this._x=.25*c,this._y=(a+e)/c,this._z=(d+h)/c):f>b?(c=2*Math.sqrt(1+f-c-b),this._w=(d-h)/c,this._x=(a+e)/c,this._y= +.25*c,this._z=(g+k)/c):(c=2*Math.sqrt(1+b-c-f),this._w=(e-a)/c,this._x=(d+h)/c,this._y=(g+k)/c,this._z=.25*c);this.onChangeCallback();return this},setFromUnitVectors:function(){var a,b;return function(c,d){void 0===a&&(a=new THREE.Vector3);b=c.dot(d)+1;1E-6>b?(b=0,Math.abs(c.x)>Math.abs(c.z)?a.set(-c.y,c.x,0):a.set(0,-c.z,c.y)):a.crossVectors(c,d);this._x=a.x;this._y=a.y;this._z=a.z;this._w=b;this.normalize();return this}}(),inverse:function(){this.conjugate().normalize();return this},conjugate:function(){this._x*= +-1;this._y*=-1;this._z*=-1;this.onChangeCallback();return this},dot:function(a){return this._x*a._x+this._y*a._y+this._z*a._z+this._w*a._w},lengthSq:function(){return this._x*this._x+this._y*this._y+this._z*this._z+this._w*this._w},length:function(){return Math.sqrt(this._x*this._x+this._y*this._y+this._z*this._z+this._w*this._w)},normalize:function(){var a=this.length();0===a?(this._z=this._y=this._x=0,this._w=1):(a=1/a,this._x*=a,this._y*=a,this._z*=a,this._w*=a);this.onChangeCallback();return this}, +multiply:function(a,b){return void 0!==b?(console.warn("THREE.Quaternion: .multiply() now only accepts one argument. 
Use .multiplyQuaternions( a, b ) instead."),this.multiplyQuaternions(a,b)):this.multiplyQuaternions(this,a)},multiplyQuaternions:function(a,b){var c=a._x,d=a._y,e=a._z,f=a._w,g=b._x,h=b._y,k=b._z,n=b._w;this._x=c*n+f*g+d*k-e*h;this._y=d*n+f*h+e*g-c*k;this._z=e*n+f*k+c*h-d*g;this._w=f*n-c*g-d*h-e*k;this.onChangeCallback();return this},multiplyVector3:function(a){console.warn("THREE.Quaternion: .multiplyVector3() has been removed. Use is now vector.applyQuaternion( quaternion ) instead."); +return a.applyQuaternion(this)},slerp:function(a,b){if(0===b)return this;if(1===b)return this.copy(a);var c=this._x,d=this._y,e=this._z,f=this._w,g=f*a._w+c*a._x+d*a._y+e*a._z;0>g?(this._w=-a._w,this._x=-a._x,this._y=-a._y,this._z=-a._z,g=-g):this.copy(a);if(1<=g)return this._w=f,this._x=c,this._y=d,this._z=e,this;var h=Math.acos(g),k=Math.sqrt(1-g*g);if(.001>Math.abs(k))return this._w=.5*(f+this._w),this._x=.5*(c+this._x),this._y=.5*(d+this._y),this._z=.5*(e+this._z),this;g=Math.sin((1-b)*h)/k;h= +Math.sin(b*h)/k;this._w=f*g+this._w*h;this._x=c*g+this._x*h;this._y=d*g+this._y*h;this._z=e*g+this._z*h;this.onChangeCallback();return this},equals:function(a){return a._x===this._x&&a._y===this._y&&a._z===this._z&&a._w===this._w},fromArray:function(a,b){void 0===b&&(b=0);this._x=a[b];this._y=a[b+1];this._z=a[b+2];this._w=a[b+3];this.onChangeCallback();return this},toArray:function(a,b){void 0===a&&(a=[]);void 0===b&&(b=0);a[b]=this._x;a[b+1]=this._y;a[b+2]=this._z;a[b+3]=this._w;return a},onChange:function(a){this.onChangeCallback= +a;return this},onChangeCallback:function(){},clone:function(){return new THREE.Quaternion(this._x,this._y,this._z,this._w)}};THREE.Quaternion.slerp=function(a,b,c,d){return c.copy(a).slerp(b,d)};THREE.Vector2=function(a,b){this.x=a||0;this.y=b||0}; +THREE.Vector2.prototype={constructor:THREE.Vector2,set:function(a,b){this.x=a;this.y=b;return this},setX:function(a){this.x=a;return this},setY:function(a){this.y=a;return 
this},setComponent:function(a,b){switch(a){case 0:this.x=b;break;case 1:this.y=b;break;default:throw Error("index is out of range: "+a);}},getComponent:function(a){switch(a){case 0:return this.x;case 1:return this.y;default:throw Error("index is out of range: "+a);}},copy:function(a){this.x=a.x;this.y=a.y;return this},add:function(a, +b){if(void 0!==b)return console.warn("THREE.Vector2: .add() now only accepts one argument. Use .addVectors( a, b ) instead."),this.addVectors(a,b);this.x+=a.x;this.y+=a.y;return this},addVectors:function(a,b){this.x=a.x+b.x;this.y=a.y+b.y;return this},addScalar:function(a){this.x+=a;this.y+=a;return this},sub:function(a,b){if(void 0!==b)return console.warn("THREE.Vector2: .sub() now only accepts one argument. Use .subVectors( a, b ) instead."),this.subVectors(a,b);this.x-=a.x;this.y-=a.y;return this}, +subVectors:function(a,b){this.x=a.x-b.x;this.y=a.y-b.y;return this},multiply:function(a){this.x*=a.x;this.y*=a.y;return this},multiplyScalar:function(a){this.x*=a;this.y*=a;return this},divide:function(a){this.x/=a.x;this.y/=a.y;return this},divideScalar:function(a){0!==a?(a=1/a,this.x*=a,this.y*=a):this.y=this.x=0;return this},min:function(a){this.x>a.x&&(this.x=a.x);this.y>a.y&&(this.y=a.y);return this},max:function(a){this.xb.x&&(this.x=b.x);this.yb.y&&(this.y=b.y);return this},clampScalar:function(){var a,b;return function(c,d){void 0===a&&(a=new THREE.Vector2,b=new THREE.Vector2);a.set(c,c);b.set(d,d);return this.clamp(a,b)}}(),floor:function(){this.x=Math.floor(this.x);this.y=Math.floor(this.y);return this},ceil:function(){this.x=Math.ceil(this.x);this.y=Math.ceil(this.y);return this},round:function(){this.x=Math.round(this.x);this.y=Math.round(this.y);return this}, +roundToZero:function(){this.x=0>this.x?Math.ceil(this.x):Math.floor(this.x);this.y=0>this.y?Math.ceil(this.y):Math.floor(this.y);return this},negate:function(){this.x=-this.x;this.y=-this.y;return this},dot:function(a){return 
this.x*a.x+this.y*a.y},lengthSq:function(){return this.x*this.x+this.y*this.y},length:function(){return Math.sqrt(this.x*this.x+this.y*this.y)},normalize:function(){return this.divideScalar(this.length())},distanceTo:function(a){return Math.sqrt(this.distanceToSquared(a))},distanceToSquared:function(a){var b= +this.x-a.x;a=this.y-a.y;return b*b+a*a},setLength:function(a){var b=this.length();0!==b&&a!==b&&this.multiplyScalar(a/b);return this},lerp:function(a,b){this.x+=(a.x-this.x)*b;this.y+=(a.y-this.y)*b;return this},equals:function(a){return a.x===this.x&&a.y===this.y},fromArray:function(a,b){void 0===b&&(b=0);this.x=a[b];this.y=a[b+1];return this},toArray:function(a,b){void 0===a&&(a=[]);void 0===b&&(b=0);a[b]=this.x;a[b+1]=this.y;return a},clone:function(){return new THREE.Vector2(this.x,this.y)}}; +THREE.Vector3=function(a,b,c){this.x=a||0;this.y=b||0;this.z=c||0}; +THREE.Vector3.prototype={constructor:THREE.Vector3,set:function(a,b,c){this.x=a;this.y=b;this.z=c;return this},setX:function(a){this.x=a;return this},setY:function(a){this.y=a;return this},setZ:function(a){this.z=a;return this},setComponent:function(a,b){switch(a){case 0:this.x=b;break;case 1:this.y=b;break;case 2:this.z=b;break;default:throw Error("index is out of range: "+a);}},getComponent:function(a){switch(a){case 0:return this.x;case 1:return this.y;case 2:return this.z;default:throw Error("index is out of range: "+ +a);}},copy:function(a){this.x=a.x;this.y=a.y;this.z=a.z;return this},add:function(a,b){if(void 0!==b)return console.warn("THREE.Vector3: .add() now only accepts one argument. Use .addVectors( a, b ) instead."),this.addVectors(a,b);this.x+=a.x;this.y+=a.y;this.z+=a.z;return this},addScalar:function(a){this.x+=a;this.y+=a;this.z+=a;return this},addVectors:function(a,b){this.x=a.x+b.x;this.y=a.y+b.y;this.z=a.z+b.z;return this},sub:function(a,b){if(void 0!==b)return console.warn("THREE.Vector3: .sub() now only accepts one argument. 
Use .subVectors( a, b ) instead."), +this.subVectors(a,b);this.x-=a.x;this.y-=a.y;this.z-=a.z;return this},subVectors:function(a,b){this.x=a.x-b.x;this.y=a.y-b.y;this.z=a.z-b.z;return this},multiply:function(a,b){if(void 0!==b)return console.warn("THREE.Vector3: .multiply() now only accepts one argument. Use .multiplyVectors( a, b ) instead."),this.multiplyVectors(a,b);this.x*=a.x;this.y*=a.y;this.z*=a.z;return this},multiplyScalar:function(a){this.x*=a;this.y*=a;this.z*=a;return this},multiplyVectors:function(a,b){this.x=a.x*b.x;this.y= +a.y*b.y;this.z=a.z*b.z;return this},applyEuler:function(){var a;return function(b){!1===b instanceof THREE.Euler&&console.error("THREE.Vector3: .applyEuler() now expects a Euler rotation rather than a Vector3 and order.");void 0===a&&(a=new THREE.Quaternion);this.applyQuaternion(a.setFromEuler(b));return this}}(),applyAxisAngle:function(){var a;return function(b,c){void 0===a&&(a=new THREE.Quaternion);this.applyQuaternion(a.setFromAxisAngle(b,c));return this}}(),applyMatrix3:function(a){var b=this.x, +c=this.y,d=this.z;a=a.elements;this.x=a[0]*b+a[3]*c+a[6]*d;this.y=a[1]*b+a[4]*c+a[7]*d;this.z=a[2]*b+a[5]*c+a[8]*d;return this},applyMatrix4:function(a){var b=this.x,c=this.y,d=this.z;a=a.elements;this.x=a[0]*b+a[4]*c+a[8]*d+a[12];this.y=a[1]*b+a[5]*c+a[9]*d+a[13];this.z=a[2]*b+a[6]*c+a[10]*d+a[14];return this},applyProjection:function(a){var b=this.x,c=this.y,d=this.z;a=a.elements;var e=1/(a[3]*b+a[7]*c+a[11]*d+a[15]);this.x=(a[0]*b+a[4]*c+a[8]*d+a[12])*e;this.y=(a[1]*b+a[5]*c+a[9]*d+a[13])*e;this.z= +(a[2]*b+a[6]*c+a[10]*d+a[14])*e;return this},applyQuaternion:function(a){var b=this.x,c=this.y,d=this.z,e=a.x,f=a.y,g=a.z;a=a.w;var h=a*b+f*d-g*c,k=a*c+g*b-e*d,n=a*d+e*c-f*b,b=-e*b-f*c-g*d;this.x=h*a+b*-e+k*-g-n*-f;this.y=k*a+b*-f+n*-e-h*-g;this.z=n*a+b*-g+h*-f-k*-e;return this},project:function(){var a;return function(b){void 0===a&&(a=new THREE.Matrix4);a.multiplyMatrices(b.projectionMatrix,a.getInverse(b.matrixWorld));return 
this.applyProjection(a)}}(),unproject:function(){var a;return function(b){void 0=== +a&&(a=new THREE.Matrix4);a.multiplyMatrices(b.matrixWorld,a.getInverse(b.projectionMatrix));return this.applyProjection(a)}}(),transformDirection:function(a){var b=this.x,c=this.y,d=this.z;a=a.elements;this.x=a[0]*b+a[4]*c+a[8]*d;this.y=a[1]*b+a[5]*c+a[9]*d;this.z=a[2]*b+a[6]*c+a[10]*d;this.normalize();return this},divide:function(a){this.x/=a.x;this.y/=a.y;this.z/=a.z;return this},divideScalar:function(a){0!==a?(a=1/a,this.x*=a,this.y*=a,this.z*=a):this.z=this.y=this.x=0;return this},min:function(a){this.x> +a.x&&(this.x=a.x);this.y>a.y&&(this.y=a.y);this.z>a.z&&(this.z=a.z);return this},max:function(a){this.xb.x&&(this.x=b.x);this.yb.y&&(this.y=b.y);this.zb.z&&(this.z=b.z);return this},clampScalar:function(){var a,b;return function(c,d){void 0===a&&(a=new THREE.Vector3,b=new THREE.Vector3);a.set(c,c,c);b.set(d,d,d);return this.clamp(a, +b)}}(),floor:function(){this.x=Math.floor(this.x);this.y=Math.floor(this.y);this.z=Math.floor(this.z);return this},ceil:function(){this.x=Math.ceil(this.x);this.y=Math.ceil(this.y);this.z=Math.ceil(this.z);return this},round:function(){this.x=Math.round(this.x);this.y=Math.round(this.y);this.z=Math.round(this.z);return this},roundToZero:function(){this.x=0>this.x?Math.ceil(this.x):Math.floor(this.x);this.y=0>this.y?Math.ceil(this.y):Math.floor(this.y);this.z=0>this.z?Math.ceil(this.z):Math.floor(this.z); +return this},negate:function(){this.x=-this.x;this.y=-this.y;this.z=-this.z;return this},dot:function(a){return this.x*a.x+this.y*a.y+this.z*a.z},lengthSq:function(){return this.x*this.x+this.y*this.y+this.z*this.z},length:function(){return Math.sqrt(this.x*this.x+this.y*this.y+this.z*this.z)},lengthManhattan:function(){return Math.abs(this.x)+Math.abs(this.y)+Math.abs(this.z)},normalize:function(){return this.divideScalar(this.length())},setLength:function(a){var b=this.length();0!==b&&a!==b&&this.multiplyScalar(a/ +b);return 
this},lerp:function(a,b){this.x+=(a.x-this.x)*b;this.y+=(a.y-this.y)*b;this.z+=(a.z-this.z)*b;return this},cross:function(a,b){if(void 0!==b)return console.warn("THREE.Vector3: .cross() now only accepts one argument. Use .crossVectors( a, b ) instead."),this.crossVectors(a,b);var c=this.x,d=this.y,e=this.z;this.x=d*a.z-e*a.y;this.y=e*a.x-c*a.z;this.z=c*a.y-d*a.x;return this},crossVectors:function(a,b){var c=a.x,d=a.y,e=a.z,f=b.x,g=b.y,h=b.z;this.x=d*h-e*g;this.y=e*f-c*h;this.z=c*g-d*f;return this}, +projectOnVector:function(){var a,b;return function(c){void 0===a&&(a=new THREE.Vector3);a.copy(c).normalize();b=this.dot(a);return this.copy(a).multiplyScalar(b)}}(),projectOnPlane:function(){var a;return function(b){void 0===a&&(a=new THREE.Vector3);a.copy(this).projectOnVector(b);return this.sub(a)}}(),reflect:function(){var a;return function(b){void 0===a&&(a=new THREE.Vector3);return this.sub(a.copy(b).multiplyScalar(2*this.dot(b)))}}(),angleTo:function(a){a=this.dot(a)/(this.length()*a.length()); +return Math.acos(THREE.Math.clamp(a,-1,1))},distanceTo:function(a){return Math.sqrt(this.distanceToSquared(a))},distanceToSquared:function(a){var b=this.x-a.x,c=this.y-a.y;a=this.z-a.z;return b*b+c*c+a*a},setEulerFromRotationMatrix:function(a,b){console.error("THREE.Vector3: .setEulerFromRotationMatrix() has been removed. Use Euler.setFromRotationMatrix() instead.")},setEulerFromQuaternion:function(a,b){console.error("THREE.Vector3: .setEulerFromQuaternion() has been removed. 
Use Euler.setFromQuaternion() instead.")}, +getPositionFromMatrix:function(a){console.warn("THREE.Vector3: .getPositionFromMatrix() has been renamed to .setFromMatrixPosition().");return this.setFromMatrixPosition(a)},getScaleFromMatrix:function(a){console.warn("THREE.Vector3: .getScaleFromMatrix() has been renamed to .setFromMatrixScale().");return this.setFromMatrixScale(a)},getColumnFromMatrix:function(a,b){console.warn("THREE.Vector3: .getColumnFromMatrix() has been renamed to .setFromMatrixColumn().");return this.setFromMatrixColumn(a, +b)},setFromMatrixPosition:function(a){this.x=a.elements[12];this.y=a.elements[13];this.z=a.elements[14];return this},setFromMatrixScale:function(a){var b=this.set(a.elements[0],a.elements[1],a.elements[2]).length(),c=this.set(a.elements[4],a.elements[5],a.elements[6]).length();a=this.set(a.elements[8],a.elements[9],a.elements[10]).length();this.x=b;this.y=c;this.z=a;return this},setFromMatrixColumn:function(a,b){var c=4*a,d=b.elements;this.x=d[c];this.y=d[c+1];this.z=d[c+2];return this},equals:function(a){return a.x=== +this.x&&a.y===this.y&&a.z===this.z},fromArray:function(a,b){void 0===b&&(b=0);this.x=a[b];this.y=a[b+1];this.z=a[b+2];return this},toArray:function(a,b){void 0===a&&(a=[]);void 0===b&&(b=0);a[b]=this.x;a[b+1]=this.y;a[b+2]=this.z;return a},clone:function(){return new THREE.Vector3(this.x,this.y,this.z)}};THREE.Vector4=function(a,b,c,d){this.x=a||0;this.y=b||0;this.z=c||0;this.w=void 0!==d?d:1}; +THREE.Vector4.prototype={constructor:THREE.Vector4,set:function(a,b,c,d){this.x=a;this.y=b;this.z=c;this.w=d;return this},setX:function(a){this.x=a;return this},setY:function(a){this.y=a;return this},setZ:function(a){this.z=a;return this},setW:function(a){this.w=a;return this},setComponent:function(a,b){switch(a){case 0:this.x=b;break;case 1:this.y=b;break;case 2:this.z=b;break;case 3:this.w=b;break;default:throw Error("index is out of range: "+a);}},getComponent:function(a){switch(a){case 0:return this.x; +case 1:return 
this.y;case 2:return this.z;case 3:return this.w;default:throw Error("index is out of range: "+a);}},copy:function(a){this.x=a.x;this.y=a.y;this.z=a.z;this.w=void 0!==a.w?a.w:1;return this},add:function(a,b){if(void 0!==b)return console.warn("THREE.Vector4: .add() now only accepts one argument. Use .addVectors( a, b ) instead."),this.addVectors(a,b);this.x+=a.x;this.y+=a.y;this.z+=a.z;this.w+=a.w;return this},addScalar:function(a){this.x+=a;this.y+=a;this.z+=a;this.w+=a;return this}, +addVectors:function(a,b){this.x=a.x+b.x;this.y=a.y+b.y;this.z=a.z+b.z;this.w=a.w+b.w;return this},sub:function(a,b){if(void 0!==b)return console.warn("THREE.Vector4: .sub() now only accepts one argument. Use .subVectors( a, b ) instead."),this.subVectors(a,b);this.x-=a.x;this.y-=a.y;this.z-=a.z;this.w-=a.w;return this},subVectors:function(a,b){this.x=a.x-b.x;this.y=a.y-b.y;this.z=a.z-b.z;this.w=a.w-b.w;return this},multiplyScalar:function(a){this.x*=a;this.y*=a;this.z*=a;this.w*=a;return this},applyMatrix4:function(a){var b= +this.x,c=this.y,d=this.z,e=this.w;a=a.elements;this.x=a[0]*b+a[4]*c+a[8]*d+a[12]*e;this.y=a[1]*b+a[5]*c+a[9]*d+a[13]*e;this.z=a[2]*b+a[6]*c+a[10]*d+a[14]*e;this.w=a[3]*b+a[7]*c+a[11]*d+a[15]*e;return this},divideScalar:function(a){0!==a?(a=1/a,this.x*=a,this.y*=a,this.z*=a,this.w*=a):(this.z=this.y=this.x=0,this.w=1);return this},setAxisAngleFromQuaternion:function(a){this.w=2*Math.acos(a.w);var b=Math.sqrt(1-a.w*a.w);1E-4>b?(this.x=1,this.z=this.y=0):(this.x=a.x/b,this.y=a.y/b,this.z=a.z/b);return this}, +setAxisAngleFromRotationMatrix:function(a){var b,c,d;a=a.elements;var e=a[0];d=a[4];var f=a[8],g=a[1],h=a[5],k=a[9];c=a[2];b=a[6];var n=a[10];if(.01>Math.abs(d-g)&&.01>Math.abs(f-c)&&.01>Math.abs(k-b)){if(.1>Math.abs(d+g)&&.1>Math.abs(f+c)&&.1>Math.abs(k+b)&&.1>Math.abs(e+h+n-3))return 
this.set(1,0,0,0),this;a=Math.PI;e=(e+1)/2;h=(h+1)/2;n=(n+1)/2;d=(d+g)/4;f=(f+c)/4;k=(k+b)/4;e>h&&e>n?.01>e?(b=0,d=c=.707106781):(b=Math.sqrt(e),c=d/b,d=f/b):h>n?.01>h?(b=.707106781,c=0,d=.707106781):(c=Math.sqrt(h), +b=d/c,d=k/c):.01>n?(c=b=.707106781,d=0):(d=Math.sqrt(n),b=f/d,c=k/d);this.set(b,c,d,a);return this}a=Math.sqrt((b-k)*(b-k)+(f-c)*(f-c)+(g-d)*(g-d));.001>Math.abs(a)&&(a=1);this.x=(b-k)/a;this.y=(f-c)/a;this.z=(g-d)/a;this.w=Math.acos((e+h+n-1)/2);return this},min:function(a){this.x>a.x&&(this.x=a.x);this.y>a.y&&(this.y=a.y);this.z>a.z&&(this.z=a.z);this.w>a.w&&(this.w=a.w);return this},max:function(a){this.xb.x&&(this.x=b.x);this.yb.y&&(this.y=b.y);this.zb.z&&(this.z=b.z);this.wb.w&&(this.w=b.w);return this},clampScalar:function(){var a,b;return function(c,d){void 0===a&&(a=new THREE.Vector4,b=new THREE.Vector4);a.set(c,c,c,c);b.set(d,d,d,d);return this.clamp(a,b)}}(),floor:function(){this.x=Math.floor(this.x);this.y=Math.floor(this.y);this.z=Math.floor(this.z);this.w=Math.floor(this.w); +return this},ceil:function(){this.x=Math.ceil(this.x);this.y=Math.ceil(this.y);this.z=Math.ceil(this.z);this.w=Math.ceil(this.w);return this},round:function(){this.x=Math.round(this.x);this.y=Math.round(this.y);this.z=Math.round(this.z);this.w=Math.round(this.w);return this},roundToZero:function(){this.x=0>this.x?Math.ceil(this.x):Math.floor(this.x);this.y=0>this.y?Math.ceil(this.y):Math.floor(this.y);this.z=0>this.z?Math.ceil(this.z):Math.floor(this.z);this.w=0>this.w?Math.ceil(this.w):Math.floor(this.w); +return this},negate:function(){this.x=-this.x;this.y=-this.y;this.z=-this.z;this.w=-this.w;return this},dot:function(a){return this.x*a.x+this.y*a.y+this.z*a.z+this.w*a.w},lengthSq:function(){return this.x*this.x+this.y*this.y+this.z*this.z+this.w*this.w},length:function(){return Math.sqrt(this.x*this.x+this.y*this.y+this.z*this.z+this.w*this.w)},lengthManhattan:function(){return 
Math.abs(this.x)+Math.abs(this.y)+Math.abs(this.z)+Math.abs(this.w)},normalize:function(){return this.divideScalar(this.length())}, +setLength:function(a){var b=this.length();0!==b&&a!==b&&this.multiplyScalar(a/b);return this},lerp:function(a,b){this.x+=(a.x-this.x)*b;this.y+=(a.y-this.y)*b;this.z+=(a.z-this.z)*b;this.w+=(a.w-this.w)*b;return this},equals:function(a){return a.x===this.x&&a.y===this.y&&a.z===this.z&&a.w===this.w},fromArray:function(a,b){void 0===b&&(b=0);this.x=a[b];this.y=a[b+1];this.z=a[b+2];this.w=a[b+3];return this},toArray:function(a,b){void 0===a&&(a=[]);void 0===b&&(b=0);a[b]=this.x;a[b+1]=this.y;a[b+2]= +this.z;a[b+3]=this.w;return a},clone:function(){return new THREE.Vector4(this.x,this.y,this.z,this.w)}};THREE.Euler=function(a,b,c,d){this._x=a||0;this._y=b||0;this._z=c||0;this._order=d||THREE.Euler.DefaultOrder};THREE.Euler.RotationOrders="XYZ YZX ZXY XZY YXZ ZYX".split(" ");THREE.Euler.DefaultOrder="XYZ"; +THREE.Euler.prototype={constructor:THREE.Euler,_x:0,_y:0,_z:0,_order:THREE.Euler.DefaultOrder,get x(){return this._x},set x(a){this._x=a;this.onChangeCallback()},get y(){return this._y},set y(a){this._y=a;this.onChangeCallback()},get z(){return this._z},set z(a){this._z=a;this.onChangeCallback()},get order(){return this._order},set order(a){this._order=a;this.onChangeCallback()},set:function(a,b,c,d){this._x=a;this._y=b;this._z=c;this._order=d||this._order;this.onChangeCallback();return this},copy:function(a){this._x= +a._x;this._y=a._y;this._z=a._z;this._order=a._order;this.onChangeCallback();return this},setFromRotationMatrix:function(a,b){var c=THREE.Math.clamp,d=a.elements,e=d[0],f=d[4],g=d[8],h=d[1],k=d[5],n=d[9],p=d[2],q=d[6],d=d[10];b=b||this._order;"XYZ"===b?(this._y=Math.asin(c(g,-1,1)),.99999>Math.abs(g)?(this._x=Math.atan2(-n,d),this._z=Math.atan2(-f,e)):(this._x=Math.atan2(q,k),this._z=0)):"YXZ"===b?(this._x=Math.asin(-c(n,-1,1)),.99999>Math.abs(n)?(this._y=Math.atan2(g,d),this._z=Math.atan2(h,k)):(this._y= 
+Math.atan2(-p,e),this._z=0)):"ZXY"===b?(this._x=Math.asin(c(q,-1,1)),.99999>Math.abs(q)?(this._y=Math.atan2(-p,d),this._z=Math.atan2(-f,k)):(this._y=0,this._z=Math.atan2(h,e))):"ZYX"===b?(this._y=Math.asin(-c(p,-1,1)),.99999>Math.abs(p)?(this._x=Math.atan2(q,d),this._z=Math.atan2(h,e)):(this._x=0,this._z=Math.atan2(-f,k))):"YZX"===b?(this._z=Math.asin(c(h,-1,1)),.99999>Math.abs(h)?(this._x=Math.atan2(-n,k),this._y=Math.atan2(-p,e)):(this._x=0,this._y=Math.atan2(g,d))):"XZY"===b?(this._z=Math.asin(-c(f, +-1,1)),.99999>Math.abs(f)?(this._x=Math.atan2(q,k),this._y=Math.atan2(g,e)):(this._x=Math.atan2(-n,d),this._y=0)):console.warn("THREE.Euler: .setFromRotationMatrix() given unsupported order: "+b);this._order=b;this.onChangeCallback();return this},setFromQuaternion:function(a,b,c){var d=THREE.Math.clamp,e=a.x*a.x,f=a.y*a.y,g=a.z*a.z,h=a.w*a.w;b=b||this._order;"XYZ"===b?(this._x=Math.atan2(2*(a.x*a.w-a.y*a.z),h-e-f+g),this._y=Math.asin(d(2*(a.x*a.z+a.y*a.w),-1,1)),this._z=Math.atan2(2*(a.z*a.w-a.x* +a.y),h+e-f-g)):"YXZ"===b?(this._x=Math.asin(d(2*(a.x*a.w-a.y*a.z),-1,1)),this._y=Math.atan2(2*(a.x*a.z+a.y*a.w),h-e-f+g),this._z=Math.atan2(2*(a.x*a.y+a.z*a.w),h-e+f-g)):"ZXY"===b?(this._x=Math.asin(d(2*(a.x*a.w+a.y*a.z),-1,1)),this._y=Math.atan2(2*(a.y*a.w-a.z*a.x),h-e-f+g),this._z=Math.atan2(2*(a.z*a.w-a.x*a.y),h-e+f-g)):"ZYX"===b?(this._x=Math.atan2(2*(a.x*a.w+a.z*a.y),h-e-f+g),this._y=Math.asin(d(2*(a.y*a.w-a.x*a.z),-1,1)),this._z=Math.atan2(2*(a.x*a.y+a.z*a.w),h+e-f-g)):"YZX"===b?(this._x=Math.atan2(2* +(a.x*a.w-a.z*a.y),h-e+f-g),this._y=Math.atan2(2*(a.y*a.w-a.x*a.z),h+e-f-g),this._z=Math.asin(d(2*(a.x*a.y+a.z*a.w),-1,1))):"XZY"===b?(this._x=Math.atan2(2*(a.x*a.w+a.y*a.z),h-e+f-g),this._y=Math.atan2(2*(a.x*a.z+a.y*a.w),h+e-f-g),this._z=Math.asin(d(2*(a.z*a.w-a.x*a.y),-1,1))):console.warn("THREE.Euler: .setFromQuaternion() given unsupported order: "+b);this._order=b;if(!1!==c)this.onChangeCallback();return this},reorder:function(){var a=new THREE.Quaternion;return 
function(b){a.setFromEuler(this); +this.setFromQuaternion(a,b)}}(),equals:function(a){return a._x===this._x&&a._y===this._y&&a._z===this._z&&a._order===this._order},fromArray:function(a){this._x=a[0];this._y=a[1];this._z=a[2];void 0!==a[3]&&(this._order=a[3]);this.onChangeCallback();return this},toArray:function(){return[this._x,this._y,this._z,this._order]},onChange:function(a){this.onChangeCallback=a;return this},onChangeCallback:function(){},clone:function(){return new THREE.Euler(this._x,this._y,this._z,this._order)}}; +THREE.Line3=function(a,b){this.start=void 0!==a?a:new THREE.Vector3;this.end=void 0!==b?b:new THREE.Vector3}; +THREE.Line3.prototype={constructor:THREE.Line3,set:function(a,b){this.start.copy(a);this.end.copy(b);return this},copy:function(a){this.start.copy(a.start);this.end.copy(a.end);return this},center:function(a){return(a||new THREE.Vector3).addVectors(this.start,this.end).multiplyScalar(.5)},delta:function(a){return(a||new THREE.Vector3).subVectors(this.end,this.start)},distanceSq:function(){return this.start.distanceToSquared(this.end)},distance:function(){return this.start.distanceTo(this.end)},at:function(a, +b){var c=b||new THREE.Vector3;return this.delta(c).multiplyScalar(a).add(this.start)},closestPointToPointParameter:function(){var a=new THREE.Vector3,b=new THREE.Vector3;return function(c,d){a.subVectors(c,this.start);b.subVectors(this.end,this.start);var e=b.dot(b),e=b.dot(a)/e;d&&(e=THREE.Math.clamp(e,0,1));return e}}(),closestPointToPoint:function(a,b,c){a=this.closestPointToPointParameter(a,b);c=c||new THREE.Vector3;return this.delta(c).multiplyScalar(a).add(this.start)},applyMatrix4:function(a){this.start.applyMatrix4(a); +this.end.applyMatrix4(a);return this},equals:function(a){return a.start.equals(this.start)&&a.end.equals(this.end)},clone:function(){return(new THREE.Line3).copy(this)}};THREE.Box2=function(a,b){this.min=void 0!==a?a:new THREE.Vector2(Infinity,Infinity);this.max=void 0!==b?b:new 
THREE.Vector2(-Infinity,-Infinity)}; +THREE.Box2.prototype={constructor:THREE.Box2,set:function(a,b){this.min.copy(a);this.max.copy(b);return this},setFromPoints:function(a){this.makeEmpty();for(var b=0,c=a.length;bthis.max.x||a.ythis.max.y?!1:!0},containsBox:function(a){return this.min.x<=a.min.x&&a.max.x<=this.max.x&&this.min.y<=a.min.y&&a.max.y<=this.max.y?!0:!1},getParameter:function(a,b){return(b||new THREE.Vector2).set((a.x-this.min.x)/(this.max.x-this.min.x),(a.y-this.min.y)/(this.max.y-this.min.y))},isIntersectionBox:function(a){return a.max.xthis.max.x||a.max.y +this.max.y?!1:!0},clampPoint:function(a,b){return(b||new THREE.Vector2).copy(a).clamp(this.min,this.max)},distanceToPoint:function(){var a=new THREE.Vector2;return function(b){return a.copy(b).clamp(this.min,this.max).sub(b).length()}}(),intersect:function(a){this.min.max(a.min);this.max.min(a.max);return this},union:function(a){this.min.min(a.min);this.max.max(a.max);return this},translate:function(a){this.min.add(a);this.max.add(a);return this},equals:function(a){return a.min.equals(this.min)&& +a.max.equals(this.max)},clone:function(){return(new THREE.Box2).copy(this)}};THREE.Box3=function(a,b){this.min=void 0!==a?a:new THREE.Vector3(Infinity,Infinity,Infinity);this.max=void 0!==b?b:new THREE.Vector3(-Infinity,-Infinity,-Infinity)}; +THREE.Box3.prototype={constructor:THREE.Box3,set:function(a,b){this.min.copy(a);this.max.copy(b);return this},setFromPoints:function(a){this.makeEmpty();for(var b=0,c=a.length;bthis.max.x||a.ythis.max.y||a.zthis.max.z?!1:!0},containsBox:function(a){return this.min.x<=a.min.x&&a.max.x<=this.max.x&&this.min.y<=a.min.y&&a.max.y<=this.max.y&&this.min.z<=a.min.z&&a.max.z<=this.max.z?!0:!1},getParameter:function(a,b){return(b||new THREE.Vector3).set((a.x-this.min.x)/(this.max.x- +this.min.x),(a.y-this.min.y)/(this.max.y-this.min.y),(a.z-this.min.z)/(this.max.z-this.min.z))},isIntersectionBox:function(a){return 
a.max.xthis.max.x||a.max.ythis.max.y||a.max.zthis.max.z?!1:!0},clampPoint:function(a,b){return(b||new THREE.Vector3).copy(a).clamp(this.min,this.max)},distanceToPoint:function(){var a=new THREE.Vector3;return function(b){return a.copy(b).clamp(this.min,this.max).sub(b).length()}}(),getBoundingSphere:function(){var a= +new THREE.Vector3;return function(b){b=b||new THREE.Sphere;b.center=this.center();b.radius=.5*this.size(a).length();return b}}(),intersect:function(a){this.min.max(a.min);this.max.min(a.max);return this},union:function(a){this.min.min(a.min);this.max.max(a.max);return this},applyMatrix4:function(){var a=[new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3];return function(b){a[0].set(this.min.x,this.min.y, +this.min.z).applyMatrix4(b);a[1].set(this.min.x,this.min.y,this.max.z).applyMatrix4(b);a[2].set(this.min.x,this.max.y,this.min.z).applyMatrix4(b);a[3].set(this.min.x,this.max.y,this.max.z).applyMatrix4(b);a[4].set(this.max.x,this.min.y,this.min.z).applyMatrix4(b);a[5].set(this.max.x,this.min.y,this.max.z).applyMatrix4(b);a[6].set(this.max.x,this.max.y,this.min.z).applyMatrix4(b);a[7].set(this.max.x,this.max.y,this.max.z).applyMatrix4(b);this.makeEmpty();this.setFromPoints(a);return this}}(),translate:function(a){this.min.add(a); +this.max.add(a);return this},equals:function(a){return a.min.equals(this.min)&&a.max.equals(this.max)},clone:function(){return(new THREE.Box3).copy(this)}};THREE.Matrix3=function(){this.elements=new Float32Array([1,0,0,0,1,0,0,0,1]);0this.determinant()&&(g=-g);c.x=f[12];c.y=f[13];c.z=f[14];b.elements.set(this.elements);c=1/g;var f=1/h,n=1/k;b.elements[0]*=c;b.elements[1]*= +c;b.elements[2]*=c;b.elements[4]*=f;b.elements[5]*=f;b.elements[6]*=f;b.elements[8]*=n;b.elements[9]*=n;b.elements[10]*=n;d.setFromRotationMatrix(b);e.x=g;e.y=h;e.z=k;return this}}(),makeFrustum:function(a,b,c,d,e,f){var 
g=this.elements;g[0]=2*e/(b-a);g[4]=0;g[8]=(b+a)/(b-a);g[12]=0;g[1]=0;g[5]=2*e/(d-c);g[9]=(d+c)/(d-c);g[13]=0;g[2]=0;g[6]=0;g[10]=-(f+e)/(f-e);g[14]=-2*f*e/(f-e);g[3]=0;g[7]=0;g[11]=-1;g[15]=0;return this},makePerspective:function(a,b,c,d){a=c*Math.tan(THREE.Math.degToRad(.5*a)); +var e=-a;return this.makeFrustum(e*b,a*b,e,a,c,d)},makeOrthographic:function(a,b,c,d,e,f){var g=this.elements,h=b-a,k=c-d,n=f-e;g[0]=2/h;g[4]=0;g[8]=0;g[12]=-((b+a)/h);g[1]=0;g[5]=2/k;g[9]=0;g[13]=-((c+d)/k);g[2]=0;g[6]=0;g[10]=-2/n;g[14]=-((f+e)/n);g[3]=0;g[7]=0;g[11]=0;g[15]=1;return this},fromArray:function(a){this.elements.set(a);return this},toArray:function(){var a=this.elements;return[a[0],a[1],a[2],a[3],a[4],a[5],a[6],a[7],a[8],a[9],a[10],a[11],a[12],a[13],a[14],a[15]]},clone:function(){return(new THREE.Matrix4).fromArray(this.elements)}}; +THREE.Ray=function(a,b){this.origin=void 0!==a?a:new THREE.Vector3;this.direction=void 0!==b?b:new THREE.Vector3}; +THREE.Ray.prototype={constructor:THREE.Ray,set:function(a,b){this.origin.copy(a);this.direction.copy(b);return this},copy:function(a){this.origin.copy(a.origin);this.direction.copy(a.direction);return this},at:function(a,b){return(b||new THREE.Vector3).copy(this.direction).multiplyScalar(a).add(this.origin)},recast:function(){var a=new THREE.Vector3;return function(b){this.origin.copy(this.at(b,a));return this}}(),closestPointToPoint:function(a,b){var c=b||new THREE.Vector3;c.subVectors(a,this.origin); +var d=c.dot(this.direction);return 0>d?c.copy(this.origin):c.copy(this.direction).multiplyScalar(d).add(this.origin)},distanceToPoint:function(){var a=new THREE.Vector3;return function(b){var c=a.subVectors(b,this.origin).dot(this.direction);if(0>c)return this.origin.distanceTo(b);a.copy(this.direction).multiplyScalar(c).add(this.origin);return a.distanceTo(b)}}(),distanceSqToSegment:function(a,b,c,d){var e=a.clone().add(b).multiplyScalar(.5),f=b.clone().sub(a).normalize(),g=.5*a.distanceTo(b),h= 
+this.origin.clone().sub(e);a=-this.direction.dot(f);b=h.dot(this.direction);var k=-h.dot(f),n=h.lengthSq(),p=Math.abs(1-a*a),q,m;0<=p?(h=a*k-b,q=a*b-k,m=g*p,0<=h?q>=-m?q<=m?(g=1/p,h*=g,q*=g,a=h*(h+a*q+2*b)+q*(a*h+q+2*k)+n):(q=g,h=Math.max(0,-(a*q+b)),a=-h*h+q*(q+2*k)+n):(q=-g,h=Math.max(0,-(a*q+b)),a=-h*h+q*(q+2*k)+n):q<=-m?(h=Math.max(0,-(-a*g+b)),q=0f)return null;f=Math.sqrt(f-e);e=d-f; +d+=f;return 0>e&&0>d?null:0>e?this.at(d,c):this.at(e,c)}}(),isIntersectionPlane:function(a){var b=a.distanceToPoint(this.origin);return 0===b||0>a.normal.dot(this.direction)*b?!0:!1},distanceToPlane:function(a){var b=a.normal.dot(this.direction);if(0==b)return 0==a.distanceToPoint(this.origin)?0:null;a=-(this.origin.dot(a.normal)+a.constant)/b;return 0<=a?a:null},intersectPlane:function(a,b){var c=this.distanceToPlane(a);return null===c?null:this.at(c,b)},isIntersectionBox:function(){var a=new THREE.Vector3; +return function(b){return null!==this.intersectBox(b,a)}}(),intersectBox:function(a,b){var c,d,e,f,g;d=1/this.direction.x;f=1/this.direction.y;g=1/this.direction.z;var h=this.origin;0<=d?(c=(a.min.x-h.x)*d,d*=a.max.x-h.x):(c=(a.max.x-h.x)*d,d*=a.min.x-h.x);0<=f?(e=(a.min.y-h.y)*f,f*=a.max.y-h.y):(e=(a.max.y-h.y)*f,f*=a.min.y-h.y);if(c>f||e>d)return null;if(e>c||c!==c)c=e;if(fg||e>d)return null;if(e>c||c!== +c)c=e;if(gd?null:this.at(0<=c?c:d,b)},intersectTriangle:function(){var a=new THREE.Vector3,b=new THREE.Vector3,c=new THREE.Vector3,d=new THREE.Vector3;return function(e,f,g,h,k){b.subVectors(f,e);c.subVectors(g,e);d.crossVectors(b,c);f=this.direction.dot(d);if(0f)h=-1,f=-f;else return null;a.subVectors(this.origin,e);e=h*this.direction.dot(c.crossVectors(a,c));if(0>e)return null;g=h*this.direction.dot(b.cross(a));if(0>g||e+g>f)return null; +e=-h*a.dot(d);return 0>e?null:this.at(e/f,k)}}(),applyMatrix4:function(a){this.direction.add(this.origin).applyMatrix4(a);this.origin.applyMatrix4(a);this.direction.sub(this.origin);this.direction.normalize();return 
this},equals:function(a){return a.origin.equals(this.origin)&&a.direction.equals(this.direction)},clone:function(){return(new THREE.Ray).copy(this)}};THREE.Sphere=function(a,b){this.center=void 0!==a?a:new THREE.Vector3;this.radius=void 0!==b?b:0}; +THREE.Sphere.prototype={constructor:THREE.Sphere,set:function(a,b){this.center.copy(a);this.radius=b;return this},setFromPoints:function(){var a=new THREE.Box3;return function(b,c){var d=this.center;void 0!==c?d.copy(c):a.setFromPoints(b).center(d);for(var e=0,f=0,g=b.length;f=this.radius},containsPoint:function(a){return a.distanceToSquared(this.center)<= +this.radius*this.radius},distanceToPoint:function(a){return a.distanceTo(this.center)-this.radius},intersectsSphere:function(a){var b=this.radius+a.radius;return a.center.distanceToSquared(this.center)<=b*b},clampPoint:function(a,b){var c=this.center.distanceToSquared(a),d=b||new THREE.Vector3;d.copy(a);c>this.radius*this.radius&&(d.sub(this.center).normalize(),d.multiplyScalar(this.radius).add(this.center));return d},getBoundingBox:function(a){a=a||new THREE.Box3;a.set(this.center,this.center);a.expandByScalar(this.radius); +return a},applyMatrix4:function(a){this.center.applyMatrix4(a);this.radius*=a.getMaxScaleOnAxis();return this},translate:function(a){this.center.add(a);return this},equals:function(a){return a.center.equals(this.center)&&a.radius===this.radius},clone:function(){return(new THREE.Sphere).copy(this)}}; +THREE.Frustum=function(a,b,c,d,e,f){this.planes=[void 0!==a?a:new THREE.Plane,void 0!==b?b:new THREE.Plane,void 0!==c?c:new THREE.Plane,void 0!==d?d:new THREE.Plane,void 0!==e?e:new THREE.Plane,void 0!==f?f:new THREE.Plane]}; +THREE.Frustum.prototype={constructor:THREE.Frustum,set:function(a,b,c,d,e,f){var g=this.planes;g[0].copy(a);g[1].copy(b);g[2].copy(c);g[3].copy(d);g[4].copy(e);g[5].copy(f);return this},copy:function(a){for(var b=this.planes,c=0;6>c;c++)b[c].copy(a.planes[c]);return this},setFromMatrix:function(a){var 
b=this.planes,c=a.elements;a=c[0];var d=c[1],e=c[2],f=c[3],g=c[4],h=c[5],k=c[6],n=c[7],p=c[8],q=c[9],m=c[10],r=c[11],t=c[12],s=c[13],u=c[14],c=c[15];b[0].setComponents(f-a,n-g,r-p,c-t).normalize();b[1].setComponents(f+ +a,n+g,r+p,c+t).normalize();b[2].setComponents(f+d,n+h,r+q,c+s).normalize();b[3].setComponents(f-d,n-h,r-q,c-s).normalize();b[4].setComponents(f-e,n-k,r-m,c-u).normalize();b[5].setComponents(f+e,n+k,r+m,c+u).normalize();return this},intersectsObject:function(){var a=new THREE.Sphere;return function(b){var c=b.geometry;null===c.boundingSphere&&c.computeBoundingSphere();a.copy(c.boundingSphere);a.applyMatrix4(b.matrixWorld);return this.intersectsSphere(a)}}(),intersectsSphere:function(a){var b=this.planes, +c=a.center;a=-a.radius;for(var d=0;6>d;d++)if(b[d].distanceToPoint(c)e;e++){var f=d[e];a.x=0g&&0>f)return!1}return!0}}(), +containsPoint:function(a){for(var b=this.planes,c=0;6>c;c++)if(0>b[c].distanceToPoint(a))return!1;return!0},clone:function(){return(new THREE.Frustum).copy(this)}};THREE.Plane=function(a,b){this.normal=void 0!==a?a:new THREE.Vector3(1,0,0);this.constant=void 0!==b?b:0}; +THREE.Plane.prototype={constructor:THREE.Plane,set:function(a,b){this.normal.copy(a);this.constant=b;return this},setComponents:function(a,b,c,d){this.normal.set(a,b,c);this.constant=d;return this},setFromNormalAndCoplanarPoint:function(a,b){this.normal.copy(a);this.constant=-b.dot(this.normal);return this},setFromCoplanarPoints:function(){var a=new THREE.Vector3,b=new THREE.Vector3;return function(c,d,e){d=a.subVectors(e,d).cross(b.subVectors(c,d)).normalize();this.setFromNormalAndCoplanarPoint(d, +c);return this}}(),copy:function(a){this.normal.copy(a.normal);this.constant=a.constant;return this},normalize:function(){var a=1/this.normal.length();this.normal.multiplyScalar(a);this.constant*=a;return this},negate:function(){this.constant*=-1;this.normal.negate();return this},distanceToPoint:function(a){return 
this.normal.dot(a)+this.constant},distanceToSphere:function(a){return this.distanceToPoint(a.center)-a.radius},projectPoint:function(a,b){return this.orthoPoint(a,b).sub(a).negate()},orthoPoint:function(a, +b){var c=this.distanceToPoint(a);return(b||new THREE.Vector3).copy(this.normal).multiplyScalar(c)},isIntersectionLine:function(a){var b=this.distanceToPoint(a.start);a=this.distanceToPoint(a.end);return 0>b&&0a&&0f||1e;e++)8==e||13==e||18==e||23==e?b[e]="-":14==e?b[e]="4":(2>=c&&(c=33554432+16777216*Math.random()|0),d=c&15,c>>=4,b[e]=a[19==e?d&3|8:d]);return b.join("")}}(),clamp:function(a,b,c){return ac?c:a},clampBottom:function(a,b){return a=c)return 1;a=(a-b)/(c-b);return a*a*(3-2*a)},smootherstep:function(a,b,c){if(a<=b)return 0;if(a>=c)return 1;a=(a-b)/(c-b);return a*a*a*(a*(6*a-15)+10)},random16:function(){return(65280*Math.random()+255*Math.random())/65535},randInt:function(a,b){return a+Math.floor(Math.random()*(b-a+1))},randFloat:function(a,b){return a+Math.random()*(b-a)},randFloatSpread:function(a){return a*(.5-Math.random())},degToRad:function(){var a=Math.PI/180;return function(b){return b*a}}(),radToDeg:function(){var a= +180/Math.PI;return function(b){return b*a}}(),isPowerOfTwo:function(a){return 0===(a&a-1)&&0!==a}}; +THREE.Spline=function(a){function b(a,b,c,d,e,f,g){a=.5*(c-a);d=.5*(d-b);return(2*(b-c)+a+d)*g+(-3*(b-c)-2*a-d)*f+a*e+b}this.points=a;var c=[],d={x:0,y:0,z:0},e,f,g,h,k,n,p,q,m;this.initFromArray=function(a){this.points=[];for(var b=0;bthis.points.length-2?this.points.length-1:f+1;c[3]=f>this.points.length-3?this.points.length-1:f+ +2;n=this.points[c[0]];p=this.points[c[1]];q=this.points[c[2]];m=this.points[c[3]];h=g*g;k=g*h;d.x=b(n.x,p.x,q.x,m.x,g,h,k);d.y=b(n.y,p.y,q.y,m.y,g,h,k);d.z=b(n.z,p.z,q.z,m.z,g,h,k);return d};this.getControlPointsArray=function(){var a,b,c=this.points.length,d=[];for(a=0;a=b.x+b.y}}(); 
+THREE.Triangle.prototype={constructor:THREE.Triangle,set:function(a,b,c){this.a.copy(a);this.b.copy(b);this.c.copy(c);return this},setFromPointsAndIndices:function(a,b,c,d){this.a.copy(a[b]);this.b.copy(a[c]);this.c.copy(a[d]);return this},copy:function(a){this.a.copy(a.a);this.b.copy(a.b);this.c.copy(a.c);return this},area:function(){var a=new THREE.Vector3,b=new THREE.Vector3;return function(){a.subVectors(this.c,this.b);b.subVectors(this.a,this.b);return.5*a.cross(b).length()}}(),midpoint:function(a){return(a|| +new THREE.Vector3).addVectors(this.a,this.b).add(this.c).multiplyScalar(1/3)},normal:function(a){return THREE.Triangle.normal(this.a,this.b,this.c,a)},plane:function(a){return(a||new THREE.Plane).setFromCoplanarPoints(this.a,this.b,this.c)},barycoordFromPoint:function(a,b){return THREE.Triangle.barycoordFromPoint(a,this.a,this.b,this.c,b)},containsPoint:function(a){return THREE.Triangle.containsPoint(a,this.a,this.b,this.c)},equals:function(a){return a.a.equals(this.a)&&a.b.equals(this.b)&&a.c.equals(this.c)}, +clone:function(){return(new THREE.Triangle).copy(this)}};THREE.Clock=function(a){this.autoStart=void 0!==a?a:!0;this.elapsedTime=this.oldTime=this.startTime=0;this.running=!1}; +THREE.Clock.prototype={constructor:THREE.Clock,start:function(){this.oldTime=this.startTime=void 0!==self.performance&&void 0!==self.performance.now?self.performance.now():Date.now();this.running=!0},stop:function(){this.getElapsedTime();this.running=!1},getElapsedTime:function(){this.getDelta();return this.elapsedTime},getDelta:function(){var a=0;this.autoStart&&!this.running&&this.start();if(this.running){var b=void 0!==self.performance&&void 0!==self.performance.now?self.performance.now():Date.now(), +a=.001*(b-this.oldTime);this.oldTime=b;this.elapsedTime+=a}return a}};THREE.EventDispatcher=function(){}; 
+THREE.EventDispatcher.prototype={constructor:THREE.EventDispatcher,apply:function(a){a.addEventListener=THREE.EventDispatcher.prototype.addEventListener;a.hasEventListener=THREE.EventDispatcher.prototype.hasEventListener;a.removeEventListener=THREE.EventDispatcher.prototype.removeEventListener;a.dispatchEvent=THREE.EventDispatcher.prototype.dispatchEvent},addEventListener:function(a,b){void 0===this._listeners&&(this._listeners={});var c=this._listeners;void 0===c[a]&&(c[a]=[]);-1===c[a].indexOf(b)&& +c[a].push(b)},hasEventListener:function(a,b){if(void 0===this._listeners)return!1;var c=this._listeners;return void 0!==c[a]&&-1!==c[a].indexOf(b)?!0:!1},removeEventListener:function(a,b){if(void 0!==this._listeners){var c=this._listeners[a];if(void 0!==c){var d=c.indexOf(b);-1!==d&&c.splice(d,1)}}},dispatchEvent:function(a){if(void 0!==this._listeners){var b=this._listeners[a.type];if(void 0!==b){a.target=this;for(var c=[],d=b.length,e=0;eza?-1:1;h[4*a]=la.x;h[4*a+1]=la.y;h[4*a+2]=la.z;h[4*a+3]=Ga}if(void 0===this.attributes.index||void 0===this.attributes.position||void 0===this.attributes.normal||void 0===this.attributes.uv)console.warn("Missing required attributes (index, position, normal or uv) in BufferGeometry.computeTangents()");else{var c=this.attributes.index.array,d=this.attributes.position.array, +e=this.attributes.normal.array,f=this.attributes.uv.array,g=d.length/3;void 0===this.attributes.tangent&&this.addAttribute("tangent",new THREE.BufferAttribute(new Float32Array(4*g),4));for(var h=this.attributes.tangent.array,k=[],n=[],p=0;ps;s++)t=a[3*c+s],-1==m[t]?(q[2*s]=t,q[2*s+1]=-1,p++):m[t]k.index+b)for(k={start:f,count:0,index:g},h.push(k),p=0;6>p;p+=2)s=q[p+1],-1p;p+=2)t=q[p],s=q[p+1],-1===s&&(s=g++),m[t]=s,r[s]=t,e[f++]=s-k.index,k.count++}this.reorderBuffers(e,r,g);return this.offsets=h},merge:function(){console.log("BufferGeometry.merge(): TODO")},normalizeNormals:function(){for(var 
a=this.attributes.normal.array,b,c,d,e=0,f=a.length;ed?-1:1,e.vertexTangents[c]=new THREE.Vector4(w.x,w.y,w.z,d);this.hasTangents=!0},computeLineDistances:function(){for(var a=0,b=this.vertices,c=0,d=b.length;cd;d++)if(e[d]==e[(d+1)%3]){a.push(f);break}for(f=a.length-1;0<=f;f--)for(e=a[f],this.faces.splice(e,1),c=0,g=this.faceVertexUvs.length;ca.opacity)h.transparent=a.transparent;void 0!==a.depthTest&&(h.depthTest=a.depthTest);void 0!==a.depthWrite&&(h.depthWrite=a.depthWrite);void 0!==a.visible&&(h.visible=a.visible);void 0!==a.flipSided&&(h.side=THREE.BackSide);void 0!==a.doubleSided&&(h.side=THREE.DoubleSide);void 0!==a.wireframe&&(h.wireframe=a.wireframe);void 0!==a.vertexColors&&("face"=== +a.vertexColors?h.vertexColors=THREE.FaceColors:a.vertexColors&&(h.vertexColors=THREE.VertexColors));a.colorDiffuse?h.color=e(a.colorDiffuse):a.DbgColor&&(h.color=a.DbgColor);a.colorSpecular&&(h.specular=e(a.colorSpecular));a.colorAmbient&&(h.ambient=e(a.colorAmbient));a.colorEmissive&&(h.emissive=e(a.colorEmissive));a.transparency&&(h.opacity=a.transparency);a.specularCoef&&(h.shininess=a.specularCoef);a.mapDiffuse&&b&&d(h,"map",a.mapDiffuse,a.mapDiffuseRepeat,a.mapDiffuseOffset,a.mapDiffuseWrap, +a.mapDiffuseAnisotropy);a.mapLight&&b&&d(h,"lightMap",a.mapLight,a.mapLightRepeat,a.mapLightOffset,a.mapLightWrap,a.mapLightAnisotropy);a.mapBump&&b&&d(h,"bumpMap",a.mapBump,a.mapBumpRepeat,a.mapBumpOffset,a.mapBumpWrap,a.mapBumpAnisotropy);a.mapNormal&&b&&d(h,"normalMap",a.mapNormal,a.mapNormalRepeat,a.mapNormalOffset,a.mapNormalWrap,a.mapNormalAnisotropy);a.mapSpecular&&b&&d(h,"specularMap",a.mapSpecular,a.mapSpecularRepeat,a.mapSpecularOffset,a.mapSpecularWrap,a.mapSpecularAnisotropy);a.mapAlpha&& 
+b&&d(h,"alphaMap",a.mapAlpha,a.mapAlphaRepeat,a.mapAlphaOffset,a.mapAlphaWrap,a.mapAlphaAnisotropy);a.mapBumpScale&&(h.bumpScale=a.mapBumpScale);a.mapNormal?(g=THREE.ShaderLib.normalmap,k=THREE.UniformsUtils.clone(g.uniforms),k.tNormal.value=h.normalMap,a.mapNormalFactor&&k.uNormalScale.value.set(a.mapNormalFactor,a.mapNormalFactor),h.map&&(k.tDiffuse.value=h.map,k.enableDiffuse.value=!0),h.specularMap&&(k.tSpecular.value=h.specularMap,k.enableSpecular.value=!0),h.lightMap&&(k.tAO.value=h.lightMap, +k.enableAO.value=!0),k.diffuse.value.setHex(h.color),k.specular.value.setHex(h.specular),k.ambient.value.setHex(h.ambient),k.shininess.value=h.shininess,void 0!==h.opacity&&(k.opacity.value=h.opacity),g=new THREE.ShaderMaterial({fragmentShader:g.fragmentShader,vertexShader:g.vertexShader,uniforms:k,lights:!0,fog:!0}),h.transparent&&(g.transparent=!0)):g=new THREE[g](h);void 0!==a.DbgName&&(g.name=a.DbgName);return g}}; +THREE.Loader.Handlers={handlers:[],add:function(a,b){this.handlers.push(a,b)},get:function(a){for(var b=0,c=this.handlers.length;bg;g++)m=y[k++],v=u[2*m],m=u[2*m+1],v=new THREE.Vector2(v,m),2!==g&&c.faceVertexUvs[d][h].push(v),0!==g&&c.faceVertexUvs[d][h+1].push(v);q&&(q=3*y[k++],r.normal.set(G[q++],G[q++],G[q]),s.normal.copy(r.normal));if(t)for(d=0;4>d;d++)q=3*y[k++],t=new THREE.Vector3(G[q++], +G[q++],G[q]),2!==d&&r.vertexNormals.push(t),0!==d&&s.vertexNormals.push(t);p&&(p=y[k++],p=w[p],r.color.setHex(p),s.color.setHex(p));if(b)for(d=0;4>d;d++)p=y[k++],p=w[p],2!==d&&r.vertexColors.push(new THREE.Color(p)),0!==d&&s.vertexColors.push(new THREE.Color(p));c.faces.push(r);c.faces.push(s)}else{r=new THREE.Face3;r.a=y[k++];r.b=y[k++];r.c=y[k++];h&&(h=y[k++],r.materialIndex=h);h=c.faces.length;if(d)for(d=0;dg;g++)m=y[k++],v=u[2*m],m=u[2*m+1], +v=new THREE.Vector2(v,m),c.faceVertexUvs[d][h].push(v);q&&(q=3*y[k++],r.normal.set(G[q++],G[q++],G[q]));if(t)for(d=0;3>d;d++)q=3*y[k++],t=new 
THREE.Vector3(G[q++],G[q++],G[q]),r.vertexNormals.push(t);p&&(p=y[k++],r.color.setHex(w[p]));if(b)for(d=0;3>d;d++)p=y[k++],r.vertexColors.push(new THREE.Color(w[p]));c.faces.push(r)}})(d);(function(){var b=void 0!==a.influencesPerVertex?a.influencesPerVertex:2;if(a.skinWeights)for(var d=0,g=a.skinWeights.length;dthis.opacity&&(a.opacity=this.opacity);!1!==this.transparent&&(a.transparent=this.transparent);!1!==this.wireframe&&(a.wireframe=this.wireframe);return a},clone:function(a){void 0===a&&(a=new THREE.Material);a.name=this.name;a.side=this.side;a.opacity=this.opacity;a.transparent=this.transparent;a.blending=this.blending;a.blendSrc=this.blendSrc;a.blendDst=this.blendDst;a.blendEquation=this.blendEquation;a.depthTest=this.depthTest;a.depthWrite=this.depthWrite;a.polygonOffset=this.polygonOffset;a.polygonOffsetFactor= +this.polygonOffsetFactor;a.polygonOffsetUnits=this.polygonOffsetUnits;a.alphaTest=this.alphaTest;a.overdraw=this.overdraw;a.visible=this.visible;return a},dispose:function(){this.dispatchEvent({type:"dispose"})}};THREE.EventDispatcher.prototype.apply(THREE.Material.prototype);THREE.MaterialIdCount=0; +THREE.LineBasicMaterial=function(a){THREE.Material.call(this);this.type="LineBasicMaterial";this.color=new THREE.Color(16777215);this.linewidth=1;this.linejoin=this.linecap="round";this.vertexColors=THREE.NoColors;this.fog=!0;this.setValues(a)};THREE.LineBasicMaterial.prototype=Object.create(THREE.Material.prototype); +THREE.LineBasicMaterial.prototype.clone=function(){var a=new THREE.LineBasicMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.linewidth=this.linewidth;a.linecap=this.linecap;a.linejoin=this.linejoin;a.vertexColors=this.vertexColors;a.fog=this.fog;return a}; +THREE.LineDashedMaterial=function(a){THREE.Material.call(this);this.type="LineDashedMaterial";this.color=new 
THREE.Color(16777215);this.scale=this.linewidth=1;this.dashSize=3;this.gapSize=1;this.vertexColors=!1;this.fog=!0;this.setValues(a)};THREE.LineDashedMaterial.prototype=Object.create(THREE.Material.prototype); +THREE.LineDashedMaterial.prototype.clone=function(){var a=new THREE.LineDashedMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.linewidth=this.linewidth;a.scale=this.scale;a.dashSize=this.dashSize;a.gapSize=this.gapSize;a.vertexColors=this.vertexColors;a.fog=this.fog;return a}; +THREE.MeshBasicMaterial=function(a){THREE.Material.call(this);this.type="MeshBasicMaterial";this.color=new THREE.Color(16777215);this.envMap=this.alphaMap=this.specularMap=this.lightMap=this.map=null;this.combine=THREE.MultiplyOperation;this.reflectivity=1;this.refractionRatio=.98;this.fog=!0;this.shading=THREE.SmoothShading;this.wireframe=!1;this.wireframeLinewidth=1;this.wireframeLinejoin=this.wireframeLinecap="round";this.vertexColors=THREE.NoColors;this.morphTargets=this.skinning=!1;this.setValues(a)}; +THREE.MeshBasicMaterial.prototype=Object.create(THREE.Material.prototype); +THREE.MeshBasicMaterial.prototype.clone=function(){var a=new THREE.MeshBasicMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.map=this.map;a.lightMap=this.lightMap;a.specularMap=this.specularMap;a.alphaMap=this.alphaMap;a.envMap=this.envMap;a.combine=this.combine;a.reflectivity=this.reflectivity;a.refractionRatio=this.refractionRatio;a.fog=this.fog;a.shading=this.shading;a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;a.wireframeLinecap=this.wireframeLinecap; +a.wireframeLinejoin=this.wireframeLinejoin;a.vertexColors=this.vertexColors;a.skinning=this.skinning;a.morphTargets=this.morphTargets;return a}; +THREE.MeshLambertMaterial=function(a){THREE.Material.call(this);this.type="MeshLambertMaterial";this.color=new THREE.Color(16777215);this.ambient=new THREE.Color(16777215);this.emissive=new 
THREE.Color(0);this.wrapAround=!1;this.wrapRGB=new THREE.Vector3(1,1,1);this.envMap=this.alphaMap=this.specularMap=this.lightMap=this.map=null;this.combine=THREE.MultiplyOperation;this.reflectivity=1;this.refractionRatio=.98;this.fog=!0;this.shading=THREE.SmoothShading;this.wireframe=!1;this.wireframeLinewidth= +1;this.wireframeLinejoin=this.wireframeLinecap="round";this.vertexColors=THREE.NoColors;this.morphNormals=this.morphTargets=this.skinning=!1;this.setValues(a)};THREE.MeshLambertMaterial.prototype=Object.create(THREE.Material.prototype); +THREE.MeshLambertMaterial.prototype.clone=function(){var a=new THREE.MeshLambertMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.ambient.copy(this.ambient);a.emissive.copy(this.emissive);a.wrapAround=this.wrapAround;a.wrapRGB.copy(this.wrapRGB);a.map=this.map;a.lightMap=this.lightMap;a.specularMap=this.specularMap;a.alphaMap=this.alphaMap;a.envMap=this.envMap;a.combine=this.combine;a.reflectivity=this.reflectivity;a.refractionRatio=this.refractionRatio;a.fog=this.fog; +a.shading=this.shading;a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;a.wireframeLinecap=this.wireframeLinecap;a.wireframeLinejoin=this.wireframeLinejoin;a.vertexColors=this.vertexColors;a.skinning=this.skinning;a.morphTargets=this.morphTargets;a.morphNormals=this.morphNormals;return a}; +THREE.MeshPhongMaterial=function(a){THREE.Material.call(this);this.type="MeshPhongMaterial";this.color=new THREE.Color(16777215);this.ambient=new THREE.Color(16777215);this.emissive=new THREE.Color(0);this.specular=new THREE.Color(1118481);this.shininess=30;this.wrapAround=this.metal=!1;this.wrapRGB=new THREE.Vector3(1,1,1);this.bumpMap=this.lightMap=this.map=null;this.bumpScale=1;this.normalMap=null;this.normalScale=new THREE.Vector2(1,1);this.envMap=this.alphaMap=this.specularMap=null;this.combine= 
+THREE.MultiplyOperation;this.reflectivity=1;this.refractionRatio=.98;this.fog=!0;this.shading=THREE.SmoothShading;this.wireframe=!1;this.wireframeLinewidth=1;this.wireframeLinejoin=this.wireframeLinecap="round";this.vertexColors=THREE.NoColors;this.morphNormals=this.morphTargets=this.skinning=!1;this.setValues(a)};THREE.MeshPhongMaterial.prototype=Object.create(THREE.Material.prototype); +THREE.MeshPhongMaterial.prototype.clone=function(){var a=new THREE.MeshPhongMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.ambient.copy(this.ambient);a.emissive.copy(this.emissive);a.specular.copy(this.specular);a.shininess=this.shininess;a.metal=this.metal;a.wrapAround=this.wrapAround;a.wrapRGB.copy(this.wrapRGB);a.map=this.map;a.lightMap=this.lightMap;a.bumpMap=this.bumpMap;a.bumpScale=this.bumpScale;a.normalMap=this.normalMap;a.normalScale.copy(this.normalScale); +a.specularMap=this.specularMap;a.alphaMap=this.alphaMap;a.envMap=this.envMap;a.combine=this.combine;a.reflectivity=this.reflectivity;a.refractionRatio=this.refractionRatio;a.fog=this.fog;a.shading=this.shading;a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;a.wireframeLinecap=this.wireframeLinecap;a.wireframeLinejoin=this.wireframeLinejoin;a.vertexColors=this.vertexColors;a.skinning=this.skinning;a.morphTargets=this.morphTargets;a.morphNormals=this.morphNormals;return a}; +THREE.MeshDepthMaterial=function(a){THREE.Material.call(this);this.type="MeshDepthMaterial";this.wireframe=this.morphTargets=!1;this.wireframeLinewidth=1;this.setValues(a)};THREE.MeshDepthMaterial.prototype=Object.create(THREE.Material.prototype);THREE.MeshDepthMaterial.prototype.clone=function(){var a=new THREE.MeshDepthMaterial;THREE.Material.prototype.clone.call(this,a);a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;return a}; 
+THREE.MeshNormalMaterial=function(a){THREE.Material.call(this,a);this.type="MeshNormalMaterial";this.shading=THREE.FlatShading;this.wireframe=!1;this.wireframeLinewidth=1;this.morphTargets=!1;this.setValues(a)};THREE.MeshNormalMaterial.prototype=Object.create(THREE.Material.prototype); +THREE.MeshNormalMaterial.prototype.clone=function(){var a=new THREE.MeshNormalMaterial;THREE.Material.prototype.clone.call(this,a);a.shading=this.shading;a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;return a};THREE.MeshFaceMaterial=function(a){this.uuid=THREE.Math.generateUUID();this.type="MeshFaceMaterial";this.materials=a instanceof Array?a:[]}; +THREE.MeshFaceMaterial.prototype={constructor:THREE.MeshFaceMaterial,toJSON:function(){for(var a={metadata:{version:4.2,type:"material",generator:"MaterialExporter"},uuid:this.uuid,type:this.type,materials:[]},b=0,c=this.materials.length;bf)){var m=b.origin.distanceTo(n);md.far||e.push({distance:m,point:k.clone().applyMatrix4(this.matrixWorld),face:null,faceIndex:null,object:this})}}}();THREE.Line.prototype.clone=function(a){void 0===a&&(a=new THREE.Line(this.geometry,this.material,this.mode));THREE.Object3D.prototype.clone.call(this,a);return a}; +THREE.Mesh=function(a,b){THREE.Object3D.call(this);this.type="Mesh";this.geometry=void 0!==a?a:new THREE.Geometry;this.material=void 0!==b?b:new THREE.MeshBasicMaterial({color:16777215*Math.random()});this.updateMorphTargets()};THREE.Mesh.prototype=Object.create(THREE.Object3D.prototype); +THREE.Mesh.prototype.updateMorphTargets=function(){if(void 0!==this.geometry.morphTargets&&0g.far||h.push({distance:x,point:K,face:new THREE.Face3(p,q,m,THREE.Triangle.normal(d,e,f)),faceIndex:null,object:this})}}}else for(s=p.position.array,t=k=0,w=s.length;k +g.far||h.push({distance:x,point:K,face:new THREE.Face3(p,q,m,THREE.Triangle.normal(d,e,f)),faceIndex:null,object:this}))}}else if(k instanceof THREE.Geometry)for(t=this.material instanceof 
THREE.MeshFaceMaterial,s=!0===t?this.material.materials:null,r=g.precision,u=k.vertices,v=0,y=k.faces.length;vg.far||h.push({distance:x,point:K,face:G,faceIndex:v,object:this}))}}}();THREE.Mesh.prototype.clone=function(a,b){void 0===a&&(a=new THREE.Mesh(this.geometry,this.material));THREE.Object3D.prototype.clone.call(this,a,b);return a};THREE.Bone=function(a){THREE.Object3D.call(this);this.skin=a};THREE.Bone.prototype=Object.create(THREE.Object3D.prototype); +THREE.Skeleton=function(a,b,c){this.useVertexTexture=void 0!==c?c:!0;this.identityMatrix=new THREE.Matrix4;a=a||[];this.bones=a.slice(0);this.useVertexTexture?(this.boneTextureHeight=this.boneTextureWidth=a=256h.end&&(h.end=e);b||(b=g)}}a.firstAnimation=b}; +THREE.MorphAnimMesh.prototype.setAnimationLabel=function(a,b,c){this.geometry.animations||(this.geometry.animations={});this.geometry.animations[a]={start:b,end:c}};THREE.MorphAnimMesh.prototype.playAnimation=function(a,b){var c=this.geometry.animations[a];c?(this.setFrameRange(c.start,c.end),this.duration=(c.end-c.start)/b*1E3,this.time=0):console.warn("animation["+a+"] undefined")}; +THREE.MorphAnimMesh.prototype.updateAnimation=function(a){var b=this.duration/this.length;this.time+=this.direction*a;if(this.mirroredLoop){if(this.time>this.duration||0>this.time)this.direction*=-1,this.time>this.duration&&(this.time=this.duration,this.directionBackwards=!0),0>this.time&&(this.time=0,this.directionBackwards=!1)}else this.time%=this.duration,0>this.time&&(this.time+=this.duration);a=this.startKeyframe+THREE.Math.clamp(Math.floor(this.time/b),0,this.length-1);a!==this.currentKeyframe&& +(this.morphTargetInfluences[this.lastKeyframe]=0,this.morphTargetInfluences[this.currentKeyframe]=1,this.morphTargetInfluences[a]=0,this.lastKeyframe=this.currentKeyframe,this.currentKeyframe=a);b=this.time%b/b;this.directionBackwards&&(b=1-b);this.morphTargetInfluences[this.currentKeyframe]=b;this.morphTargetInfluences[this.lastKeyframe]=1-b}; 
+THREE.MorphAnimMesh.prototype.interpolateTargets=function(a,b,c){for(var d=this.morphTargetInfluences,e=0,f=d.length;e=this.objects[d].distance)this.objects[d-1].object.visible=!1,this.objects[d].object.visible=!0;else break;for(;dthis.scale.x||c.push({distance:d,point:this.position,face:null,object:this})}}();THREE.Sprite.prototype.clone=function(a){void 0===a&&(a=new THREE.Sprite(this.material));THREE.Object3D.prototype.clone.call(this,a);return a};THREE.Particle=THREE.Sprite; +THREE.LensFlare=function(a,b,c,d,e){THREE.Object3D.call(this);this.lensFlares=[];this.positionScreen=new THREE.Vector3;this.customUpdateCallback=void 0;void 0!==a&&this.add(a,b,c,d,e)};THREE.LensFlare.prototype=Object.create(THREE.Object3D.prototype); +THREE.LensFlare.prototype.add=function(a,b,c,d,e,f){void 0===b&&(b=-1);void 0===c&&(c=0);void 0===f&&(f=1);void 0===e&&(e=new THREE.Color(16777215));void 0===d&&(d=THREE.NormalBlending);c=Math.min(c,Math.max(0,c));this.lensFlares.push({texture:a,size:b,distance:c,x:0,y:0,z:0,scale:1,rotation:1,opacity:f,color:e,blending:d})}; +THREE.LensFlare.prototype.updateLensFlares=function(){var a,b=this.lensFlares.length,c,d=2*-this.positionScreen.x,e=2*-this.positionScreen.y;for(a=0;a dashSize ) {\n\t\tdiscard;\n\t}\n\tgl_FragColor = vec4( diffuse, opacity );",THREE.ShaderChunk.logdepthbuf_fragment,THREE.ShaderChunk.color_fragment,THREE.ShaderChunk.fog_fragment, +"}"].join("\n")},depth:{uniforms:{mNear:{type:"f",value:1},mFar:{type:"f",value:2E3},opacity:{type:"f",value:1}},vertexShader:[THREE.ShaderChunk.morphtarget_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {",THREE.ShaderChunk.morphtarget_vertex,THREE.ShaderChunk.default_vertex,THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:["uniform float mNear;\nuniform float mFar;\nuniform float opacity;",THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {",THREE.ShaderChunk.logdepthbuf_fragment, +"\t#ifdef USE_LOGDEPTHBUF_EXT\n\t\tfloat depth = 
gl_FragDepthEXT / gl_FragCoord.w;\n\t#else\n\t\tfloat depth = gl_FragCoord.z / gl_FragCoord.w;\n\t#endif\n\tfloat color = 1.0 - smoothstep( mNear, mFar, depth );\n\tgl_FragColor = vec4( vec3( color ), opacity );\n}"].join("\n")},normal:{uniforms:{opacity:{type:"f",value:1}},vertexShader:["varying vec3 vNormal;",THREE.ShaderChunk.morphtarget_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {\n\tvNormal = normalize( normalMatrix * normal );", +THREE.ShaderChunk.morphtarget_vertex,THREE.ShaderChunk.default_vertex,THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:["uniform float opacity;\nvarying vec3 vNormal;",THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {\n\tgl_FragColor = vec4( 0.5 * normalize( vNormal ) + 0.5, opacity );",THREE.ShaderChunk.logdepthbuf_fragment,"}"].join("\n")},normalmap:{uniforms:THREE.UniformsUtils.merge([THREE.UniformsLib.fog,THREE.UniformsLib.lights,THREE.UniformsLib.shadowmap,{enableAO:{type:"i", +value:0},enableDiffuse:{type:"i",value:0},enableSpecular:{type:"i",value:0},enableReflection:{type:"i",value:0},enableDisplacement:{type:"i",value:0},tDisplacement:{type:"t",value:null},tDiffuse:{type:"t",value:null},tCube:{type:"t",value:null},tNormal:{type:"t",value:null},tSpecular:{type:"t",value:null},tAO:{type:"t",value:null},uNormalScale:{type:"v2",value:new THREE.Vector2(1,1)},uDisplacementBias:{type:"f",value:0},uDisplacementScale:{type:"f",value:1},diffuse:{type:"c",value:new THREE.Color(16777215)}, +specular:{type:"c",value:new THREE.Color(1118481)},ambient:{type:"c",value:new THREE.Color(16777215)},shininess:{type:"f",value:30},opacity:{type:"f",value:1},useRefract:{type:"i",value:0},refractionRatio:{type:"f",value:.98},reflectivity:{type:"f",value:.5},uOffset:{type:"v2",value:new THREE.Vector2(0,0)},uRepeat:{type:"v2",value:new THREE.Vector2(1,1)},wrapRGB:{type:"v3",value:new THREE.Vector3(1,1,1)}}]),fragmentShader:["uniform vec3 ambient;\nuniform vec3 diffuse;\nuniform vec3 
specular;\nuniform float shininess;\nuniform float opacity;\nuniform bool enableDiffuse;\nuniform bool enableSpecular;\nuniform bool enableAO;\nuniform bool enableReflection;\nuniform sampler2D tDiffuse;\nuniform sampler2D tNormal;\nuniform sampler2D tSpecular;\nuniform sampler2D tAO;\nuniform samplerCube tCube;\nuniform vec2 uNormalScale;\nuniform bool useRefract;\nuniform float refractionRatio;\nuniform float reflectivity;\nvarying vec3 vTangent;\nvarying vec3 vBinormal;\nvarying vec3 vNormal;\nvarying vec2 vUv;\nuniform vec3 ambientLightColor;\n#if MAX_DIR_LIGHTS > 0\n\tuniform vec3 directionalLightColor[ MAX_DIR_LIGHTS ];\n\tuniform vec3 directionalLightDirection[ MAX_DIR_LIGHTS ];\n#endif\n#if MAX_HEMI_LIGHTS > 0\n\tuniform vec3 hemisphereLightSkyColor[ MAX_HEMI_LIGHTS ];\n\tuniform vec3 hemisphereLightGroundColor[ MAX_HEMI_LIGHTS ];\n\tuniform vec3 hemisphereLightDirection[ MAX_HEMI_LIGHTS ];\n#endif\n#if MAX_POINT_LIGHTS > 0\n\tuniform vec3 pointLightColor[ MAX_POINT_LIGHTS ];\n\tuniform vec3 pointLightPosition[ MAX_POINT_LIGHTS ];\n\tuniform float pointLightDistance[ MAX_POINT_LIGHTS ];\n#endif\n#if MAX_SPOT_LIGHTS > 0\n\tuniform vec3 spotLightColor[ MAX_SPOT_LIGHTS ];\n\tuniform vec3 spotLightPosition[ MAX_SPOT_LIGHTS ];\n\tuniform vec3 spotLightDirection[ MAX_SPOT_LIGHTS ];\n\tuniform float spotLightAngleCos[ MAX_SPOT_LIGHTS ];\n\tuniform float spotLightExponent[ MAX_SPOT_LIGHTS ];\n\tuniform float spotLightDistance[ MAX_SPOT_LIGHTS ];\n#endif\n#ifdef WRAP_AROUND\n\tuniform vec3 wrapRGB;\n#endif\nvarying vec3 vWorldPosition;\nvarying vec3 vViewPosition;", +THREE.ShaderChunk.shadowmap_pars_fragment,THREE.ShaderChunk.fog_pars_fragment,THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {",THREE.ShaderChunk.logdepthbuf_fragment,"\tgl_FragColor = vec4( vec3( 1.0 ), opacity );\n\tvec3 specularTex = vec3( 1.0 );\n\tvec3 normalTex = texture2D( tNormal, vUv ).xyz * 2.0 - 1.0;\n\tnormalTex.xy *= uNormalScale;\n\tnormalTex = normalize( normalTex );\n\tif( 
enableDiffuse ) {\n\t\t#ifdef GAMMA_INPUT\n\t\t\tvec4 texelColor = texture2D( tDiffuse, vUv );\n\t\t\ttexelColor.xyz *= texelColor.xyz;\n\t\t\tgl_FragColor = gl_FragColor * texelColor;\n\t\t#else\n\t\t\tgl_FragColor = gl_FragColor * texture2D( tDiffuse, vUv );\n\t\t#endif\n\t}\n\tif( enableAO ) {\n\t\t#ifdef GAMMA_INPUT\n\t\t\tvec4 aoColor = texture2D( tAO, vUv );\n\t\t\taoColor.xyz *= aoColor.xyz;\n\t\t\tgl_FragColor.xyz = gl_FragColor.xyz * aoColor.xyz;\n\t\t#else\n\t\t\tgl_FragColor.xyz = gl_FragColor.xyz * texture2D( tAO, vUv ).xyz;\n\t\t#endif\n\t}", +THREE.ShaderChunk.alphatest_fragment,"\tif( enableSpecular )\n\t\tspecularTex = texture2D( tSpecular, vUv ).xyz;\n\tmat3 tsb = mat3( normalize( vTangent ), normalize( vBinormal ), normalize( vNormal ) );\n\tvec3 finalNormal = tsb * normalTex;\n\t#ifdef FLIP_SIDED\n\t\tfinalNormal = -finalNormal;\n\t#endif\n\tvec3 normal = normalize( finalNormal );\n\tvec3 viewPosition = normalize( vViewPosition );\n\t#if MAX_POINT_LIGHTS > 0\n\t\tvec3 pointDiffuse = vec3( 0.0 );\n\t\tvec3 pointSpecular = vec3( 0.0 );\n\t\tfor ( int i = 0; i < MAX_POINT_LIGHTS; i ++ ) {\n\t\t\tvec4 lPosition = viewMatrix * vec4( pointLightPosition[ i ], 1.0 );\n\t\t\tvec3 pointVector = lPosition.xyz + vViewPosition.xyz;\n\t\t\tfloat pointDistance = 1.0;\n\t\t\tif ( pointLightDistance[ i ] > 0.0 )\n\t\t\t\tpointDistance = 1.0 - min( ( length( pointVector ) / pointLightDistance[ i ] ), 1.0 );\n\t\t\tpointVector = normalize( pointVector );\n\t\t\t#ifdef WRAP_AROUND\n\t\t\t\tfloat pointDiffuseWeightFull = max( dot( normal, pointVector ), 0.0 );\n\t\t\t\tfloat pointDiffuseWeightHalf = max( 0.5 * dot( normal, pointVector ) + 0.5, 0.0 );\n\t\t\t\tvec3 pointDiffuseWeight = mix( vec3( pointDiffuseWeightFull ), vec3( pointDiffuseWeightHalf ), wrapRGB );\n\t\t\t#else\n\t\t\t\tfloat pointDiffuseWeight = max( dot( normal, pointVector ), 0.0 );\n\t\t\t#endif\n\t\t\tpointDiffuse += pointDistance * pointLightColor[ i ] * diffuse * pointDiffuseWeight;\n\t\t\tvec3 
pointHalfVector = normalize( pointVector + viewPosition );\n\t\t\tfloat pointDotNormalHalf = max( dot( normal, pointHalfVector ), 0.0 );\n\t\t\tfloat pointSpecularWeight = specularTex.r * max( pow( pointDotNormalHalf, shininess ), 0.0 );\n\t\t\tfloat specularNormalization = ( shininess + 2.0 ) / 8.0;\n\t\t\tvec3 schlick = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( pointVector, pointHalfVector ), 0.0 ), 5.0 );\n\t\t\tpointSpecular += schlick * pointLightColor[ i ] * pointSpecularWeight * pointDiffuseWeight * pointDistance * specularNormalization;\n\t\t}\n\t#endif\n\t#if MAX_SPOT_LIGHTS > 0\n\t\tvec3 spotDiffuse = vec3( 0.0 );\n\t\tvec3 spotSpecular = vec3( 0.0 );\n\t\tfor ( int i = 0; i < MAX_SPOT_LIGHTS; i ++ ) {\n\t\t\tvec4 lPosition = viewMatrix * vec4( spotLightPosition[ i ], 1.0 );\n\t\t\tvec3 spotVector = lPosition.xyz + vViewPosition.xyz;\n\t\t\tfloat spotDistance = 1.0;\n\t\t\tif ( spotLightDistance[ i ] > 0.0 )\n\t\t\t\tspotDistance = 1.0 - min( ( length( spotVector ) / spotLightDistance[ i ] ), 1.0 );\n\t\t\tspotVector = normalize( spotVector );\n\t\t\tfloat spotEffect = dot( spotLightDirection[ i ], normalize( spotLightPosition[ i ] - vWorldPosition ) );\n\t\t\tif ( spotEffect > spotLightAngleCos[ i ] ) {\n\t\t\t\tspotEffect = max( pow( max( spotEffect, 0.0 ), spotLightExponent[ i ] ), 0.0 );\n\t\t\t\t#ifdef WRAP_AROUND\n\t\t\t\t\tfloat spotDiffuseWeightFull = max( dot( normal, spotVector ), 0.0 );\n\t\t\t\t\tfloat spotDiffuseWeightHalf = max( 0.5 * dot( normal, spotVector ) + 0.5, 0.0 );\n\t\t\t\t\tvec3 spotDiffuseWeight = mix( vec3( spotDiffuseWeightFull ), vec3( spotDiffuseWeightHalf ), wrapRGB );\n\t\t\t\t#else\n\t\t\t\t\tfloat spotDiffuseWeight = max( dot( normal, spotVector ), 0.0 );\n\t\t\t\t#endif\n\t\t\t\tspotDiffuse += spotDistance * spotLightColor[ i ] * diffuse * spotDiffuseWeight * spotEffect;\n\t\t\t\tvec3 spotHalfVector = normalize( spotVector + viewPosition );\n\t\t\t\tfloat spotDotNormalHalf = max( dot( normal, 
spotHalfVector ), 0.0 );\n\t\t\t\tfloat spotSpecularWeight = specularTex.r * max( pow( spotDotNormalHalf, shininess ), 0.0 );\n\t\t\t\tfloat specularNormalization = ( shininess + 2.0 ) / 8.0;\n\t\t\t\tvec3 schlick = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( spotVector, spotHalfVector ), 0.0 ), 5.0 );\n\t\t\t\tspotSpecular += schlick * spotLightColor[ i ] * spotSpecularWeight * spotDiffuseWeight * spotDistance * specularNormalization * spotEffect;\n\t\t\t}\n\t\t}\n\t#endif\n\t#if MAX_DIR_LIGHTS > 0\n\t\tvec3 dirDiffuse = vec3( 0.0 );\n\t\tvec3 dirSpecular = vec3( 0.0 );\n\t\tfor( int i = 0; i < MAX_DIR_LIGHTS; i++ ) {\n\t\t\tvec4 lDirection = viewMatrix * vec4( directionalLightDirection[ i ], 0.0 );\n\t\t\tvec3 dirVector = normalize( lDirection.xyz );\n\t\t\t#ifdef WRAP_AROUND\n\t\t\t\tfloat directionalLightWeightingFull = max( dot( normal, dirVector ), 0.0 );\n\t\t\t\tfloat directionalLightWeightingHalf = max( 0.5 * dot( normal, dirVector ) + 0.5, 0.0 );\n\t\t\t\tvec3 dirDiffuseWeight = mix( vec3( directionalLightWeightingFull ), vec3( directionalLightWeightingHalf ), wrapRGB );\n\t\t\t#else\n\t\t\t\tfloat dirDiffuseWeight = max( dot( normal, dirVector ), 0.0 );\n\t\t\t#endif\n\t\t\tdirDiffuse += directionalLightColor[ i ] * diffuse * dirDiffuseWeight;\n\t\t\tvec3 dirHalfVector = normalize( dirVector + viewPosition );\n\t\t\tfloat dirDotNormalHalf = max( dot( normal, dirHalfVector ), 0.0 );\n\t\t\tfloat dirSpecularWeight = specularTex.r * max( pow( dirDotNormalHalf, shininess ), 0.0 );\n\t\t\tfloat specularNormalization = ( shininess + 2.0 ) / 8.0;\n\t\t\tvec3 schlick = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( dirVector, dirHalfVector ), 0.0 ), 5.0 );\n\t\t\tdirSpecular += schlick * directionalLightColor[ i ] * dirSpecularWeight * dirDiffuseWeight * specularNormalization;\n\t\t}\n\t#endif\n\t#if MAX_HEMI_LIGHTS > 0\n\t\tvec3 hemiDiffuse = vec3( 0.0 );\n\t\tvec3 hemiSpecular = vec3( 0.0 );\n\t\tfor( int i = 0; i < MAX_HEMI_LIGHTS; i ++ 
) {\n\t\t\tvec4 lDirection = viewMatrix * vec4( hemisphereLightDirection[ i ], 0.0 );\n\t\t\tvec3 lVector = normalize( lDirection.xyz );\n\t\t\tfloat dotProduct = dot( normal, lVector );\n\t\t\tfloat hemiDiffuseWeight = 0.5 * dotProduct + 0.5;\n\t\t\tvec3 hemiColor = mix( hemisphereLightGroundColor[ i ], hemisphereLightSkyColor[ i ], hemiDiffuseWeight );\n\t\t\themiDiffuse += diffuse * hemiColor;\n\t\t\tvec3 hemiHalfVectorSky = normalize( lVector + viewPosition );\n\t\t\tfloat hemiDotNormalHalfSky = 0.5 * dot( normal, hemiHalfVectorSky ) + 0.5;\n\t\t\tfloat hemiSpecularWeightSky = specularTex.r * max( pow( max( hemiDotNormalHalfSky, 0.0 ), shininess ), 0.0 );\n\t\t\tvec3 lVectorGround = -lVector;\n\t\t\tvec3 hemiHalfVectorGround = normalize( lVectorGround + viewPosition );\n\t\t\tfloat hemiDotNormalHalfGround = 0.5 * dot( normal, hemiHalfVectorGround ) + 0.5;\n\t\t\tfloat hemiSpecularWeightGround = specularTex.r * max( pow( max( hemiDotNormalHalfGround, 0.0 ), shininess ), 0.0 );\n\t\t\tfloat dotProductGround = dot( normal, lVectorGround );\n\t\t\tfloat specularNormalization = ( shininess + 2.0 ) / 8.0;\n\t\t\tvec3 schlickSky = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( lVector, hemiHalfVectorSky ), 0.0 ), 5.0 );\n\t\t\tvec3 schlickGround = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( lVectorGround, hemiHalfVectorGround ), 0.0 ), 5.0 );\n\t\t\themiSpecular += hemiColor * specularNormalization * ( schlickSky * hemiSpecularWeightSky * max( dotProduct, 0.0 ) + schlickGround * hemiSpecularWeightGround * max( dotProductGround, 0.0 ) );\n\t\t}\n\t#endif\n\tvec3 totalDiffuse = vec3( 0.0 );\n\tvec3 totalSpecular = vec3( 0.0 );\n\t#if MAX_DIR_LIGHTS > 0\n\t\ttotalDiffuse += dirDiffuse;\n\t\ttotalSpecular += dirSpecular;\n\t#endif\n\t#if MAX_HEMI_LIGHTS > 0\n\t\ttotalDiffuse += hemiDiffuse;\n\t\ttotalSpecular += hemiSpecular;\n\t#endif\n\t#if MAX_POINT_LIGHTS > 0\n\t\ttotalDiffuse += pointDiffuse;\n\t\ttotalSpecular += 
pointSpecular;\n\t#endif\n\t#if MAX_SPOT_LIGHTS > 0\n\t\ttotalDiffuse += spotDiffuse;\n\t\ttotalSpecular += spotSpecular;\n\t#endif\n\t#ifdef METAL\n\t\tgl_FragColor.xyz = gl_FragColor.xyz * ( totalDiffuse + ambientLightColor * ambient + totalSpecular );\n\t#else\n\t\tgl_FragColor.xyz = gl_FragColor.xyz * ( totalDiffuse + ambientLightColor * ambient ) + totalSpecular;\n\t#endif\n\tif ( enableReflection ) {\n\t\tvec3 vReflect;\n\t\tvec3 cameraToVertex = normalize( vWorldPosition - cameraPosition );\n\t\tif ( useRefract ) {\n\t\t\tvReflect = refract( cameraToVertex, normal, refractionRatio );\n\t\t} else {\n\t\t\tvReflect = reflect( cameraToVertex, normal );\n\t\t}\n\t\tvec4 cubeColor = textureCube( tCube, vec3( -vReflect.x, vReflect.yz ) );\n\t\t#ifdef GAMMA_INPUT\n\t\t\tcubeColor.xyz *= cubeColor.xyz;\n\t\t#endif\n\t\tgl_FragColor.xyz = mix( gl_FragColor.xyz, cubeColor.xyz, specularTex.r * reflectivity );\n\t}", +THREE.ShaderChunk.shadowmap_fragment,THREE.ShaderChunk.linear_to_gamma_fragment,THREE.ShaderChunk.fog_fragment,"}"].join("\n"),vertexShader:["attribute vec4 tangent;\nuniform vec2 uOffset;\nuniform vec2 uRepeat;\nuniform bool enableDisplacement;\n#ifdef VERTEX_TEXTURES\n\tuniform sampler2D tDisplacement;\n\tuniform float uDisplacementScale;\n\tuniform float uDisplacementBias;\n#endif\nvarying vec3 vTangent;\nvarying vec3 vBinormal;\nvarying vec3 vNormal;\nvarying vec2 vUv;\nvarying vec3 vWorldPosition;\nvarying vec3 vViewPosition;", +THREE.ShaderChunk.skinning_pars_vertex,THREE.ShaderChunk.shadowmap_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {",THREE.ShaderChunk.skinbase_vertex,THREE.ShaderChunk.skinnormal_vertex,"\t#ifdef USE_SKINNING\n\t\tvNormal = normalize( normalMatrix * skinnedNormal.xyz );\n\t\tvec4 skinnedTangent = skinMatrix * vec4( tangent.xyz, 0.0 );\n\t\tvTangent = normalize( normalMatrix * skinnedTangent.xyz );\n\t#else\n\t\tvNormal = normalize( normalMatrix * normal );\n\t\tvTangent = normalize( normalMatrix * 
tangent.xyz );\n\t#endif\n\tvBinormal = normalize( cross( vNormal, vTangent ) * tangent.w );\n\tvUv = uv * uRepeat + uOffset;\n\tvec3 displacedPosition;\n\t#ifdef VERTEX_TEXTURES\n\t\tif ( enableDisplacement ) {\n\t\t\tvec3 dv = texture2D( tDisplacement, uv ).xyz;\n\t\t\tfloat df = uDisplacementScale * dv.x + uDisplacementBias;\n\t\t\tdisplacedPosition = position + normalize( normal ) * df;\n\t\t} else {\n\t\t\t#ifdef USE_SKINNING\n\t\t\t\tvec4 skinVertex = bindMatrix * vec4( position, 1.0 );\n\t\t\t\tvec4 skinned = vec4( 0.0 );\n\t\t\t\tskinned += boneMatX * skinVertex * skinWeight.x;\n\t\t\t\tskinned += boneMatY * skinVertex * skinWeight.y;\n\t\t\t\tskinned += boneMatZ * skinVertex * skinWeight.z;\n\t\t\t\tskinned += boneMatW * skinVertex * skinWeight.w;\n\t\t\t\tskinned = bindMatrixInverse * skinned;\n\t\t\t\tdisplacedPosition = skinned.xyz;\n\t\t\t#else\n\t\t\t\tdisplacedPosition = position;\n\t\t\t#endif\n\t\t}\n\t#else\n\t\t#ifdef USE_SKINNING\n\t\t\tvec4 skinVertex = bindMatrix * vec4( position, 1.0 );\n\t\t\tvec4 skinned = vec4( 0.0 );\n\t\t\tskinned += boneMatX * skinVertex * skinWeight.x;\n\t\t\tskinned += boneMatY * skinVertex * skinWeight.y;\n\t\t\tskinned += boneMatZ * skinVertex * skinWeight.z;\n\t\t\tskinned += boneMatW * skinVertex * skinWeight.w;\n\t\t\tskinned = bindMatrixInverse * skinned;\n\t\t\tdisplacedPosition = skinned.xyz;\n\t\t#else\n\t\t\tdisplacedPosition = position;\n\t\t#endif\n\t#endif\n\tvec4 mvPosition = modelViewMatrix * vec4( displacedPosition, 1.0 );\n\tvec4 worldPosition = modelMatrix * vec4( displacedPosition, 1.0 );\n\tgl_Position = projectionMatrix * mvPosition;", +THREE.ShaderChunk.logdepthbuf_vertex,"\tvWorldPosition = worldPosition.xyz;\n\tvViewPosition = -mvPosition.xyz;\n\t#ifdef USE_SHADOWMAP\n\t\tfor( int i = 0; i < MAX_SHADOWS; i ++ ) {\n\t\t\tvShadowCoord[ i ] = shadowMatrix[ i ] * 
worldPosition;\n\t\t}\n\t#endif\n}"].join("\n")},cube:{uniforms:{tCube:{type:"t",value:null},tFlip:{type:"f",value:-1}},vertexShader:["varying vec3 vWorldPosition;",THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {\n\tvec4 worldPosition = modelMatrix * vec4( position, 1.0 );\n\tvWorldPosition = worldPosition.xyz;\n\tgl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );", +THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:["uniform samplerCube tCube;\nuniform float tFlip;\nvarying vec3 vWorldPosition;",THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {\n\tgl_FragColor = textureCube( tCube, vec3( tFlip * vWorldPosition.x, vWorldPosition.yz ) );",THREE.ShaderChunk.logdepthbuf_fragment,"}"].join("\n")},depthRGBA:{uniforms:{},vertexShader:[THREE.ShaderChunk.morphtarget_pars_vertex,THREE.ShaderChunk.skinning_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex, +"void main() {",THREE.ShaderChunk.skinbase_vertex,THREE.ShaderChunk.morphtarget_vertex,THREE.ShaderChunk.skinning_vertex,THREE.ShaderChunk.default_vertex,THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:[THREE.ShaderChunk.logdepthbuf_pars_fragment,"vec4 pack_depth( const in float depth ) {\n\tconst vec4 bit_shift = vec4( 256.0 * 256.0 * 256.0, 256.0 * 256.0, 256.0, 1.0 );\n\tconst vec4 bit_mask = vec4( 0.0, 1.0 / 256.0, 1.0 / 256.0, 1.0 / 256.0 );\n\tvec4 res = mod( depth * bit_shift * vec4( 255 ), vec4( 256 ) ) / vec4( 255 );\n\tres -= res.xxyz * bit_mask;\n\treturn res;\n}\nvoid main() {", +THREE.ShaderChunk.logdepthbuf_fragment,"\t#ifdef USE_LOGDEPTHBUF_EXT\n\t\tgl_FragData[ 0 ] = pack_depth( gl_FragDepthEXT );\n\t#else\n\t\tgl_FragData[ 0 ] = pack_depth( gl_FragCoord.z );\n\t#endif\n}"].join("\n")}}; +THREE.WebGLRenderer=function(a){function b(a){var b=a.geometry;a=a.material;var c=b.vertices.length;if(a.attributes){void 0===b.__webglCustomAttributesList&&(b.__webglCustomAttributesList=[]);for(var d in a.attributes){var 
e=a.attributes[d];if(!e.__webglInitialized||e.createUniqueBuffers){e.__webglInitialized=!0;var f=1;"v2"===e.type?f=2:"v3"===e.type?f=3:"v4"===e.type?f=4:"c"===e.type&&(f=3);e.size=f;e.array=new Float32Array(c*f);e.buffer=l.createBuffer();e.buffer.belongsToAttribute=d;e.needsUpdate= +!0}b.__webglCustomAttributesList.push(e)}}}function c(a,b){var c=b.geometry,e=a.faces3,f=3*e.length,g=1*e.length,h=3*e.length,e=d(b,a);a.__vertexArray=new Float32Array(3*f);a.__normalArray=new Float32Array(3*f);a.__colorArray=new Float32Array(3*f);a.__uvArray=new Float32Array(2*f);1Aa;Aa++)Cb=ma[Aa],Ta[Sa]=Cb.x,Ta[Sa+1]=Cb.y,Ta[Sa+2]=Cb.z,Sa+=3;else for(Aa=0;3>Aa;Aa++)Ta[Sa]=pa.x,Ta[Sa+1]=pa.y,Ta[Sa+2]=pa.z,Sa+=3;l.bindBuffer(l.ARRAY_BUFFER,C.__webglNormalBuffer);l.bufferData(l.ARRAY_BUFFER, +Ta,S)}if(Kc&&ua){M=0;for(ea=N.length;MAa;Aa++)Oa=hb[Aa],sb[qb]=Oa.x,sb[qb+1]=Oa.y,qb+=2;0Aa;Aa++)Qb=za[Aa],fb[rb]=Qb.x,fb[rb+1]=Qb.y,rb+=2;0h&&(f[v].counter+=1,k=f[v].hash+"_"+f[v].counter,k in r||(p={id:rc++, +faces3:[],materialIndex:v,vertices:0,numMorphTargets:m,numMorphNormals:n},r[k]=p,q.push(p)));r[k].faces3.push(t);r[k].vertices+=3}a[g]=q;d.groupsNeedUpdate=!1}a=xb[d.id];g=0;for(e=a.length;gDa;Da++)kb[Da]=!J.autoScaleCubemaps||Ob||Tb?Tb?ua.image[Da].image:ua.image[Da]:R(ua.image[Da],$c);var ka=kb[0],Zb=THREE.Math.isPowerOfTwo(ka.width)&&THREE.Math.isPowerOfTwo(ka.height),ab=Q(ua.format),Fb=Q(ua.type);F(l.TEXTURE_CUBE_MAP,ua,Zb);for(Da=0;6>Da;Da++)if(Ob)for(var gb,$b=kb[Da].mipmaps,ga=0,Xb=$b.length;ga=Oc&&console.warn("WebGLRenderer: trying to use "+a+" texture units while this GPU supports only "+ +Oc);dc+=1;return a}function x(a,b){a._modelViewMatrix.multiplyMatrices(b.matrixWorldInverse,a.matrixWorld);a._normalMatrix.getNormalMatrix(a._modelViewMatrix)}function D(a,b,c,d){a[b]=c.r*c.r*d;a[b+1]=c.g*c.g*d;a[b+2]=c.b*c.b*d}function E(a,b,c,d){a[b]=c.r*d;a[b+1]=c.g*d;a[b+2]=c.b*d}function A(a){a!==Pc&&(l.lineWidth(a),Pc=a)}function 
B(a,b,c){Qc!==a&&(a?l.enable(l.POLYGON_OFFSET_FILL):l.disable(l.POLYGON_OFFSET_FILL),Qc=a);!a||Rc===b&&Sc===c||(l.polygonOffset(b,c),Rc=b,Sc=c)}function F(a,b,c){c? +(l.texParameteri(a,l.TEXTURE_WRAP_S,Q(b.wrapS)),l.texParameteri(a,l.TEXTURE_WRAP_T,Q(b.wrapT)),l.texParameteri(a,l.TEXTURE_MAG_FILTER,Q(b.magFilter)),l.texParameteri(a,l.TEXTURE_MIN_FILTER,Q(b.minFilter))):(l.texParameteri(a,l.TEXTURE_WRAP_S,l.CLAMP_TO_EDGE),l.texParameteri(a,l.TEXTURE_WRAP_T,l.CLAMP_TO_EDGE),l.texParameteri(a,l.TEXTURE_MAG_FILTER,T(b.magFilter)),l.texParameteri(a,l.TEXTURE_MIN_FILTER,T(b.minFilter)));(c=pa.get("EXT_texture_filter_anisotropic"))&&b.type!==THREE.FloatType&&(1b||a.height>b){var c=b/Math.max(a.width,a.height),d=document.createElement("canvas");d.width=Math.floor(a.width*c);d.height=Math.floor(a.height*c);d.getContext("2d").drawImage(a,0,0,a.width,a.height,0,0,d.width,d.height);console.log("THREE.WebGLRenderer:",a,"is too big ("+a.width+"x"+a.height+"). Resized to "+d.width+"x"+d.height+ +".");return d}return a}function H(a,b){l.bindRenderbuffer(l.RENDERBUFFER,a);b.depthBuffer&&!b.stencilBuffer?(l.renderbufferStorage(l.RENDERBUFFER,l.DEPTH_COMPONENT16,b.width,b.height),l.framebufferRenderbuffer(l.FRAMEBUFFER,l.DEPTH_ATTACHMENT,l.RENDERBUFFER,a)):b.depthBuffer&&b.stencilBuffer?(l.renderbufferStorage(l.RENDERBUFFER,l.DEPTH_STENCIL,b.width,b.height),l.framebufferRenderbuffer(l.FRAMEBUFFER,l.DEPTH_STENCIL_ATTACHMENT,l.RENDERBUFFER,a)):l.renderbufferStorage(l.RENDERBUFFER,l.RGBA4,b.width, +b.height)}function C(a){a instanceof THREE.WebGLRenderTargetCube?(l.bindTexture(l.TEXTURE_CUBE_MAP,a.__webglTexture),l.generateMipmap(l.TEXTURE_CUBE_MAP),l.bindTexture(l.TEXTURE_CUBE_MAP,null)):(l.bindTexture(l.TEXTURE_2D,a.__webglTexture),l.generateMipmap(l.TEXTURE_2D),l.bindTexture(l.TEXTURE_2D,null))}function T(a){return a===THREE.NearestFilter||a===THREE.NearestMipMapNearestFilter||a===THREE.NearestMipMapLinearFilter?l.NEAREST:l.LINEAR}function Q(a){var 
b;if(a===THREE.RepeatWrapping)return l.REPEAT; +if(a===THREE.ClampToEdgeWrapping)return l.CLAMP_TO_EDGE;if(a===THREE.MirroredRepeatWrapping)return l.MIRRORED_REPEAT;if(a===THREE.NearestFilter)return l.NEAREST;if(a===THREE.NearestMipMapNearestFilter)return l.NEAREST_MIPMAP_NEAREST;if(a===THREE.NearestMipMapLinearFilter)return l.NEAREST_MIPMAP_LINEAR;if(a===THREE.LinearFilter)return l.LINEAR;if(a===THREE.LinearMipMapNearestFilter)return l.LINEAR_MIPMAP_NEAREST;if(a===THREE.LinearMipMapLinearFilter)return l.LINEAR_MIPMAP_LINEAR;if(a===THREE.UnsignedByteType)return l.UNSIGNED_BYTE; +if(a===THREE.UnsignedShort4444Type)return l.UNSIGNED_SHORT_4_4_4_4;if(a===THREE.UnsignedShort5551Type)return l.UNSIGNED_SHORT_5_5_5_1;if(a===THREE.UnsignedShort565Type)return l.UNSIGNED_SHORT_5_6_5;if(a===THREE.ByteType)return l.BYTE;if(a===THREE.ShortType)return l.SHORT;if(a===THREE.UnsignedShortType)return l.UNSIGNED_SHORT;if(a===THREE.IntType)return l.INT;if(a===THREE.UnsignedIntType)return l.UNSIGNED_INT;if(a===THREE.FloatType)return l.FLOAT;if(a===THREE.AlphaFormat)return l.ALPHA;if(a===THREE.RGBFormat)return l.RGB; +if(a===THREE.RGBAFormat)return l.RGBA;if(a===THREE.LuminanceFormat)return l.LUMINANCE;if(a===THREE.LuminanceAlphaFormat)return l.LUMINANCE_ALPHA;if(a===THREE.AddEquation)return l.FUNC_ADD;if(a===THREE.SubtractEquation)return l.FUNC_SUBTRACT;if(a===THREE.ReverseSubtractEquation)return l.FUNC_REVERSE_SUBTRACT;if(a===THREE.ZeroFactor)return l.ZERO;if(a===THREE.OneFactor)return l.ONE;if(a===THREE.SrcColorFactor)return l.SRC_COLOR;if(a===THREE.OneMinusSrcColorFactor)return l.ONE_MINUS_SRC_COLOR;if(a=== +THREE.SrcAlphaFactor)return l.SRC_ALPHA;if(a===THREE.OneMinusSrcAlphaFactor)return l.ONE_MINUS_SRC_ALPHA;if(a===THREE.DstAlphaFactor)return l.DST_ALPHA;if(a===THREE.OneMinusDstAlphaFactor)return l.ONE_MINUS_DST_ALPHA;if(a===THREE.DstColorFactor)return l.DST_COLOR;if(a===THREE.OneMinusDstColorFactor)return l.ONE_MINUS_DST_COLOR;if(a===THREE.SrcAlphaSaturateFactor)return 
l.SRC_ALPHA_SATURATE;b=pa.get("WEBGL_compressed_texture_s3tc");if(null!==b){if(a===THREE.RGB_S3TC_DXT1_Format)return b.COMPRESSED_RGB_S3TC_DXT1_EXT; +if(a===THREE.RGBA_S3TC_DXT1_Format)return b.COMPRESSED_RGBA_S3TC_DXT1_EXT;if(a===THREE.RGBA_S3TC_DXT3_Format)return b.COMPRESSED_RGBA_S3TC_DXT3_EXT;if(a===THREE.RGBA_S3TC_DXT5_Format)return b.COMPRESSED_RGBA_S3TC_DXT5_EXT}b=pa.get("WEBGL_compressed_texture_pvrtc");if(null!==b){if(a===THREE.RGB_PVRTC_4BPPV1_Format)return b.COMPRESSED_RGB_PVRTC_4BPPV1_IMG;if(a===THREE.RGB_PVRTC_2BPPV1_Format)return b.COMPRESSED_RGB_PVRTC_2BPPV1_IMG;if(a===THREE.RGBA_PVRTC_4BPPV1_Format)return b.COMPRESSED_RGBA_PVRTC_4BPPV1_IMG; +if(a===THREE.RGBA_PVRTC_2BPPV1_Format)return b.COMPRESSED_RGBA_PVRTC_2BPPV1_IMG}b=pa.get("EXT_blend_minmax");if(null!==b){if(a===THREE.MinEquation)return b.MIN_EXT;if(a===THREE.MaxEquation)return b.MAX_EXT}return 0}console.log("THREE.WebGLRenderer",THREE.REVISION);a=a||{};var O=void 0!==a.canvas?a.canvas:document.createElement("canvas"),S=void 0!==a.context?a.context:null,X=void 0!==a.precision?a.precision:"highp",Y=void 0!==a.alpha?a.alpha:!1,la=void 0!==a.depth?a.depth:!0,ma=void 0!==a.stencil? 
+a.stencil:!0,ya=void 0!==a.antialias?a.antialias:!1,P=void 0!==a.premultipliedAlpha?a.premultipliedAlpha:!0,Ga=void 0!==a.preserveDrawingBuffer?a.preserveDrawingBuffer:!1,Fa=void 0!==a.logarithmicDepthBuffer?a.logarithmicDepthBuffer:!1,za=new THREE.Color(0),bb=0,cb=[],ob={},jb=[],Jb=[],Ib=[],yb=[],Ra=[];this.domElement=O;this.context=null;this.devicePixelRatio=void 0!==a.devicePixelRatio?a.devicePixelRatio:void 0!==self.devicePixelRatio?self.devicePixelRatio:1;this.sortObjects=this.autoClearStencil= +this.autoClearDepth=this.autoClearColor=this.autoClear=!0;this.shadowMapEnabled=this.gammaOutput=this.gammaInput=!1;this.shadowMapType=THREE.PCFShadowMap;this.shadowMapCullFace=THREE.CullFaceFront;this.shadowMapCascade=this.shadowMapDebug=!1;this.maxMorphTargets=8;this.maxMorphNormals=4;this.autoScaleCubemaps=!0;this.info={memory:{programs:0,geometries:0,textures:0},render:{calls:0,vertices:0,faces:0,points:0}};var J=this,hb=[],tc=null,Tc=null,Kb=-1,Oa=-1,ec=null,dc=0,Lb=-1,Mb=-1,pb=-1,Nb=-1,Ob=-1, +Xb=-1,Yb=-1,nb=-1,Qc=null,Rc=null,Sc=null,Pc=null,Pb=0,kc=0,lc=O.width,mc=O.height,Uc=0,Vc=0,wb=new Uint8Array(16),ib=new Uint8Array(16),Ec=new THREE.Frustum,Ac=new THREE.Matrix4,Gc=new THREE.Matrix4,Na=new THREE.Vector3,sa=new THREE.Vector3,fc=!0,Mc={ambient:[0,0,0],directional:{length:0,colors:[],positions:[]},point:{length:0,colors:[],positions:[],distances:[]},spot:{length:0,colors:[],positions:[],distances:[],directions:[],anglesCos:[],exponents:[]},hemi:{length:0,skyColors:[],groundColors:[], +positions:[]}},l;try{var Wc={alpha:Y,depth:la,stencil:ma,antialias:ya,premultipliedAlpha:P,preserveDrawingBuffer:Ga};l=S||O.getContext("webgl",Wc)||O.getContext("experimental-webgl",Wc);if(null===l){if(null!==O.getContext("webgl"))throw"Error creating WebGL context with your selected attributes.";throw"Error creating WebGL context.";}}catch(ad){console.error(ad)}void 
0===l.getShaderPrecisionFormat&&(l.getShaderPrecisionFormat=function(){return{rangeMin:1,rangeMax:1,precision:1}});var pa=new THREE.WebGLExtensions(l); +pa.get("OES_texture_float");pa.get("OES_texture_float_linear");pa.get("OES_standard_derivatives");Fa&&pa.get("EXT_frag_depth");l.clearColor(0,0,0,1);l.clearDepth(1);l.clearStencil(0);l.enable(l.DEPTH_TEST);l.depthFunc(l.LEQUAL);l.frontFace(l.CCW);l.cullFace(l.BACK);l.enable(l.CULL_FACE);l.enable(l.BLEND);l.blendEquation(l.FUNC_ADD);l.blendFunc(l.SRC_ALPHA,l.ONE_MINUS_SRC_ALPHA);l.viewport(Pb,kc,lc,mc);l.clearColor(za.r,za.g,za.b,bb);this.context=l;var Oc=l.getParameter(l.MAX_TEXTURE_IMAGE_UNITS), +bd=l.getParameter(l.MAX_VERTEX_TEXTURE_IMAGE_UNITS),cd=l.getParameter(l.MAX_TEXTURE_SIZE),$c=l.getParameter(l.MAX_CUBE_MAP_TEXTURE_SIZE),sc=0b;b++)l.deleteFramebuffer(a.__webglFramebuffer[b]),l.deleteRenderbuffer(a.__webglRenderbuffer[b]); +else l.deleteFramebuffer(a.__webglFramebuffer),l.deleteRenderbuffer(a.__webglRenderbuffer);delete a.__webglFramebuffer;delete a.__webglRenderbuffer}J.info.memory.textures--},Dc=function(a){a=a.target;a.removeEventListener("dispose",Dc);Cc(a)},Yc=function(a){for(var b="__webglVertexBuffer __webglNormalBuffer __webglTangentBuffer __webglColorBuffer __webglUVBuffer __webglUV2Buffer __webglSkinIndicesBuffer __webglSkinWeightsBuffer __webglFaceBuffer __webglLineBuffer __webglLineDistanceBuffer".split(" "), +c=0,d=b.length;cd.numSupportedMorphTargets?(n.sort(p),n.length=d.numSupportedMorphTargets):n.length>d.numSupportedMorphNormals?n.sort(p):0===n.length&&n.push([0, +0]);for(m=0;mf;f++){a.__webglFramebuffer[f]=l.createFramebuffer();a.__webglRenderbuffer[f]=l.createRenderbuffer();l.texImage2D(l.TEXTURE_CUBE_MAP_POSITIVE_X+f,0,d,a.width,a.height,0,d,e,null);var 
g=a,h=l.TEXTURE_CUBE_MAP_POSITIVE_X+f;l.bindFramebuffer(l.FRAMEBUFFER,a.__webglFramebuffer[f]);l.framebufferTexture2D(l.FRAMEBUFFER,l.COLOR_ATTACHMENT0,h,g.__webglTexture,0);H(a.__webglRenderbuffer[f],a)}c&&l.generateMipmap(l.TEXTURE_CUBE_MAP)}else a.__webglFramebuffer= +l.createFramebuffer(),a.__webglRenderbuffer=a.shareDepthFrom?a.shareDepthFrom.__webglRenderbuffer:l.createRenderbuffer(),l.bindTexture(l.TEXTURE_2D,a.__webglTexture),F(l.TEXTURE_2D,a,c),l.texImage2D(l.TEXTURE_2D,0,d,a.width,a.height,0,d,e,null),d=l.TEXTURE_2D,l.bindFramebuffer(l.FRAMEBUFFER,a.__webglFramebuffer),l.framebufferTexture2D(l.FRAMEBUFFER,l.COLOR_ATTACHMENT0,d,a.__webglTexture,0),a.shareDepthFrom?a.depthBuffer&&!a.stencilBuffer?l.framebufferRenderbuffer(l.FRAMEBUFFER,l.DEPTH_ATTACHMENT, +l.RENDERBUFFER,a.__webglRenderbuffer):a.depthBuffer&&a.stencilBuffer&&l.framebufferRenderbuffer(l.FRAMEBUFFER,l.DEPTH_STENCIL_ATTACHMENT,l.RENDERBUFFER,a.__webglRenderbuffer):H(a.__webglRenderbuffer,a),c&&l.generateMipmap(l.TEXTURE_2D);b?l.bindTexture(l.TEXTURE_CUBE_MAP,null):l.bindTexture(l.TEXTURE_2D,null);l.bindRenderbuffer(l.RENDERBUFFER,null);l.bindFramebuffer(l.FRAMEBUFFER,null)}a?(b=b?a.__webglFramebuffer[a.activeCubeFace]:a.__webglFramebuffer,c=a.width,a=a.height,e=d=0):(b=null,c=lc,a=mc, +d=Pb,e=kc);b!==Tc&&(l.bindFramebuffer(l.FRAMEBUFFER,b),l.viewport(d,e,c,a),Tc=b);Uc=c;Vc=a};this.initMaterial=function(){console.warn("THREE.WebGLRenderer: .initMaterial() has been removed.")};this.addPrePlugin=function(){console.warn("THREE.WebGLRenderer: .addPrePlugin() has been removed.")};this.addPostPlugin=function(){console.warn("THREE.WebGLRenderer: .addPostPlugin() has been removed.")};this.updateShadowMap=function(){console.warn("THREE.WebGLRenderer: .updateShadowMap() has been removed.")}}; +THREE.WebGLRenderTarget=function(a,b,c){this.width=a;this.height=b;c=c||{};this.wrapS=void 0!==c.wrapS?c.wrapS:THREE.ClampToEdgeWrapping;this.wrapT=void 
0!==c.wrapT?c.wrapT:THREE.ClampToEdgeWrapping;this.magFilter=void 0!==c.magFilter?c.magFilter:THREE.LinearFilter;this.minFilter=void 0!==c.minFilter?c.minFilter:THREE.LinearMipMapLinearFilter;this.anisotropy=void 0!==c.anisotropy?c.anisotropy:1;this.offset=new THREE.Vector2(0,0);this.repeat=new THREE.Vector2(1,1);this.format=void 0!==c.format?c.format: +THREE.RGBAFormat;this.type=void 0!==c.type?c.type:THREE.UnsignedByteType;this.depthBuffer=void 0!==c.depthBuffer?c.depthBuffer:!0;this.stencilBuffer=void 0!==c.stencilBuffer?c.stencilBuffer:!0;this.generateMipmaps=!0;this.shareDepthFrom=null}; +THREE.WebGLRenderTarget.prototype={constructor:THREE.WebGLRenderTarget,setSize:function(a,b){this.width=a;this.height=b},clone:function(){var a=new THREE.WebGLRenderTarget(this.width,this.height);a.wrapS=this.wrapS;a.wrapT=this.wrapT;a.magFilter=this.magFilter;a.minFilter=this.minFilter;a.anisotropy=this.anisotropy;a.offset.copy(this.offset);a.repeat.copy(this.repeat);a.format=this.format;a.type=this.type;a.depthBuffer=this.depthBuffer;a.stencilBuffer=this.stencilBuffer;a.generateMipmaps=this.generateMipmaps; +a.shareDepthFrom=this.shareDepthFrom;return a},dispose:function(){this.dispatchEvent({type:"dispose"})}};THREE.EventDispatcher.prototype.apply(THREE.WebGLRenderTarget.prototype);THREE.WebGLRenderTargetCube=function(a,b,c){THREE.WebGLRenderTarget.call(this,a,b,c);this.activeCubeFace=0};THREE.WebGLRenderTargetCube.prototype=Object.create(THREE.WebGLRenderTarget.prototype); +THREE.WebGLExtensions=function(a){var b={};this.get=function(c){if(void 0!==b[c])return b[c];var d;switch(c){case "OES_texture_float":d=a.getExtension("OES_texture_float");break;case "OES_texture_float_linear":d=a.getExtension("OES_texture_float_linear");break;case "OES_standard_derivatives":d=a.getExtension("OES_standard_derivatives");break;case 
"EXT_texture_filter_anisotropic":d=a.getExtension("EXT_texture_filter_anisotropic")||a.getExtension("MOZ_EXT_texture_filter_anisotropic")||a.getExtension("WEBKIT_EXT_texture_filter_anisotropic"); +break;case "WEBGL_compressed_texture_s3tc":d=a.getExtension("WEBGL_compressed_texture_s3tc")||a.getExtension("MOZ_WEBGL_compressed_texture_s3tc")||a.getExtension("WEBKIT_WEBGL_compressed_texture_s3tc");break;case "WEBGL_compressed_texture_pvrtc":d=a.getExtension("WEBGL_compressed_texture_pvrtc")||a.getExtension("WEBKIT_WEBGL_compressed_texture_pvrtc");break;case "OES_element_index_uint":d=a.getExtension("OES_element_index_uint");break;case "EXT_blend_minmax":d=a.getExtension("EXT_blend_minmax");break; +case "EXT_frag_depth":d=a.getExtension("EXT_frag_depth")}null===d&&console.log("THREE.WebGLRenderer: "+c+" extension not supported.");return b[c]=d}}; +THREE.WebGLProgram=function(){var a=0;return function(b,c,d,e){var f=b.context,g=d.defines,h=d.__webglShader.uniforms,k=d.attributes,n=d.__webglShader.vertexShader,p=d.__webglShader.fragmentShader,q=d.index0AttributeName;void 0===q&&!0===e.morphTargets&&(q="position");var m="SHADOWMAP_TYPE_BASIC";e.shadowMapType===THREE.PCFShadowMap?m="SHADOWMAP_TYPE_PCF":e.shadowMapType===THREE.PCFSoftShadowMap&&(m="SHADOWMAP_TYPE_PCF_SOFT");var r,t;r=[];for(var s in g)t=g[s],!1!==t&&(t="#define "+s+" "+t,r.push(t)); +r=r.join("\n");g=f.createProgram();d instanceof THREE.RawShaderMaterial?b=d="":(d=["precision "+e.precision+" float;","precision "+e.precision+" int;",r,e.supportsVertexTextures?"#define VERTEX_TEXTURES":"",b.gammaInput?"#define GAMMA_INPUT":"",b.gammaOutput?"#define GAMMA_OUTPUT":"","#define MAX_DIR_LIGHTS "+e.maxDirLights,"#define MAX_POINT_LIGHTS "+e.maxPointLights,"#define MAX_SPOT_LIGHTS "+e.maxSpotLights,"#define MAX_HEMI_LIGHTS "+e.maxHemiLights,"#define MAX_SHADOWS "+e.maxShadows,"#define MAX_BONES "+ +e.maxBones,e.map?"#define USE_MAP":"",e.envMap?"#define USE_ENVMAP":"",e.lightMap?"#define 
USE_LIGHTMAP":"",e.bumpMap?"#define USE_BUMPMAP":"",e.normalMap?"#define USE_NORMALMAP":"",e.specularMap?"#define USE_SPECULARMAP":"",e.alphaMap?"#define USE_ALPHAMAP":"",e.vertexColors?"#define USE_COLOR":"",e.skinning?"#define USE_SKINNING":"",e.useVertexTexture?"#define BONE_TEXTURE":"",e.morphTargets?"#define USE_MORPHTARGETS":"",e.morphNormals?"#define USE_MORPHNORMALS":"",e.wrapAround?"#define WRAP_AROUND": +"",e.doubleSided?"#define DOUBLE_SIDED":"",e.flipSided?"#define FLIP_SIDED":"",e.shadowMapEnabled?"#define USE_SHADOWMAP":"",e.shadowMapEnabled?"#define "+m:"",e.shadowMapDebug?"#define SHADOWMAP_DEBUG":"",e.shadowMapCascade?"#define SHADOWMAP_CASCADE":"",e.sizeAttenuation?"#define USE_SIZEATTENUATION":"",e.logarithmicDepthBuffer?"#define USE_LOGDEPTHBUF":"","uniform mat4 modelMatrix;\nuniform mat4 modelViewMatrix;\nuniform mat4 projectionMatrix;\nuniform mat4 viewMatrix;\nuniform mat3 normalMatrix;\nuniform vec3 cameraPosition;\nattribute vec3 position;\nattribute vec3 normal;\nattribute vec2 uv;\nattribute vec2 uv2;\n#ifdef USE_COLOR\n\tattribute vec3 color;\n#endif\n#ifdef USE_MORPHTARGETS\n\tattribute vec3 morphTarget0;\n\tattribute vec3 morphTarget1;\n\tattribute vec3 morphTarget2;\n\tattribute vec3 morphTarget3;\n\t#ifdef USE_MORPHNORMALS\n\t\tattribute vec3 morphNormal0;\n\t\tattribute vec3 morphNormal1;\n\t\tattribute vec3 morphNormal2;\n\t\tattribute vec3 morphNormal3;\n\t#else\n\t\tattribute vec3 morphTarget4;\n\t\tattribute vec3 morphTarget5;\n\t\tattribute vec3 morphTarget6;\n\t\tattribute vec3 morphTarget7;\n\t#endif\n#endif\n#ifdef USE_SKINNING\n\tattribute vec4 skinIndex;\n\tattribute vec4 skinWeight;\n#endif\n"].join("\n"), +b=["precision "+e.precision+" float;","precision "+e.precision+" int;",e.bumpMap||e.normalMap?"#extension GL_OES_standard_derivatives : enable":"",r,"#define MAX_DIR_LIGHTS "+e.maxDirLights,"#define MAX_POINT_LIGHTS "+e.maxPointLights,"#define MAX_SPOT_LIGHTS "+e.maxSpotLights,"#define MAX_HEMI_LIGHTS 
"+e.maxHemiLights,"#define MAX_SHADOWS "+e.maxShadows,e.alphaTest?"#define ALPHATEST "+e.alphaTest:"",b.gammaInput?"#define GAMMA_INPUT":"",b.gammaOutput?"#define GAMMA_OUTPUT":"",e.useFog&&e.fog?"#define USE_FOG": +"",e.useFog&&e.fogExp?"#define FOG_EXP2":"",e.map?"#define USE_MAP":"",e.envMap?"#define USE_ENVMAP":"",e.lightMap?"#define USE_LIGHTMAP":"",e.bumpMap?"#define USE_BUMPMAP":"",e.normalMap?"#define USE_NORMALMAP":"",e.specularMap?"#define USE_SPECULARMAP":"",e.alphaMap?"#define USE_ALPHAMAP":"",e.vertexColors?"#define USE_COLOR":"",e.metal?"#define METAL":"",e.wrapAround?"#define WRAP_AROUND":"",e.doubleSided?"#define DOUBLE_SIDED":"",e.flipSided?"#define FLIP_SIDED":"",e.shadowMapEnabled?"#define USE_SHADOWMAP": +"",e.shadowMapEnabled?"#define "+m:"",e.shadowMapDebug?"#define SHADOWMAP_DEBUG":"",e.shadowMapCascade?"#define SHADOWMAP_CASCADE":"",e.logarithmicDepthBuffer?"#define USE_LOGDEPTHBUF":"","uniform mat4 viewMatrix;\nuniform vec3 cameraPosition;\n"].join("\n"));n=new THREE.WebGLShader(f,f.VERTEX_SHADER,d+n);p=new THREE.WebGLShader(f,f.FRAGMENT_SHADER,b+p);f.attachShader(g,n);f.attachShader(g,p);void 0!==q&&f.bindAttribLocation(g,0,q);f.linkProgram(g);!1===f.getProgramParameter(g,f.LINK_STATUS)&&(console.error("THREE.WebGLProgram: Could not initialise shader."), +console.error("gl.VALIDATE_STATUS",f.getProgramParameter(g,f.VALIDATE_STATUS)),console.error("gl.getError()",f.getError()));""!==f.getProgramInfoLog(g)&&console.warn("THREE.WebGLProgram: gl.getProgramInfoLog()",f.getProgramInfoLog(g));f.deleteShader(n);f.deleteShader(p);q="viewMatrix modelViewMatrix projectionMatrix normalMatrix modelMatrix cameraPosition morphTargetInfluences bindMatrix bindMatrixInverse".split(" ");e.useVertexTexture?(q.push("boneTexture"),q.push("boneTextureWidth"),q.push("boneTextureHeight")): +q.push("boneGlobalMatrices");e.logarithmicDepthBuffer&&q.push("logDepthBufFC");for(var u in h)q.push(u);h=q;u={};q=0;for(b=h.length;qT;T++)F[T]=new THREE.Vector3,A[T]=new 
THREE.Vector3;F=B.shadowCascadeNearZ[C];B=B.shadowCascadeFarZ[C];A[0].set(-1,-1,F);A[1].set(1,-1,F);A[2].set(-1,1,F);A[3].set(1,1,F);A[4].set(-1,-1,B);A[5].set(1,-1,B);A[6].set(-1,1,B);A[7].set(1,1,B);H.originalCamera=v;A=new THREE.Gyroscope;A.position.copy(x.shadowCascadeOffset);A.add(H);A.add(H.target);v.add(A);x.shadowCascadeArray[E]=H;console.log("Created virtualLight",H)}C= +x;F=E;B=C.shadowCascadeArray[F];B.position.copy(C.position);B.target.position.copy(C.target.position);B.lookAt(B.target);B.shadowCameraVisible=C.shadowCameraVisible;B.shadowDarkness=C.shadowDarkness;B.shadowBias=C.shadowCascadeBias[F];A=C.shadowCascadeNearZ[F];C=C.shadowCascadeFarZ[F];B=B.pointsFrustum;B[0].z=A;B[1].z=A;B[2].z=A;B[3].z=A;B[4].z=C;B[5].z=C;B[6].z=C;B[7].z=C;R[D]=H;D++}else R[D]=x,D++;u=0;for(K=R.length;uC;C++)F=B[C],F.copy(A[C]),F.unproject(E),F.applyMatrix4(D.matrixWorldInverse),F.xr.x&&(r.x=F.x),F.yr.y&&(r.y=F.y),F.zr.z&&(r.z=F.z);D.left=m.x;D.right=r.x;D.top=r.y;D.bottom=m.y;D.updateProjectionMatrix()}D=x.shadowMap;A=x.shadowMatrix;E=x.shadowCamera;E.position.setFromMatrixPosition(x.matrixWorld);t.setFromMatrixPosition(x.target.matrixWorld);E.lookAt(t);E.updateMatrixWorld();E.matrixWorldInverse.getInverse(E.matrixWorld);x.cameraHelper&& +(x.cameraHelper.visible=x.shadowCameraVisible);x.shadowCameraVisible&&x.cameraHelper.update();A.set(.5,0,0,.5,0,.5,0,.5,0,0,.5,.5,0,0,0,1);A.multiply(E.projectionMatrix);A.multiply(E.matrixWorldInverse);q.multiplyMatrices(E.projectionMatrix,E.matrixWorldInverse);p.setFromMatrix(q);a.setRenderTarget(D);a.clear();s.length=0;e(c,c,E);x=0;for(D=s.length;x 0 ) {\nfloat depth = gl_FragCoord.z / gl_FragCoord.w;\nfloat fogFactor = 0.0;\nif ( fogType == 1 ) {\nfogFactor = smoothstep( fogNear, fogFar, depth );\n} else {\nconst float LOG2 = 1.442695;\nfloat fogFactor = exp2( - fogDensity * fogDensity * depth * depth * LOG2 );\nfogFactor = 1.0 - clamp( fogFactor, 0.0, 1.0 );\n}\ngl_FragColor = mix( gl_FragColor, vec4( fogColor, gl_FragColor.w ), 
fogFactor );\n}\n}"].join("\n")); +w.compileShader(R);w.compileShader(H);w.attachShader(F,R);w.attachShader(F,H);w.linkProgram(F);D=F;v=w.getAttribLocation(D,"position");y=w.getAttribLocation(D,"uv");c=w.getUniformLocation(D,"uvOffset");d=w.getUniformLocation(D,"uvScale");e=w.getUniformLocation(D,"rotation");f=w.getUniformLocation(D,"scale");g=w.getUniformLocation(D,"color");h=w.getUniformLocation(D,"map");k=w.getUniformLocation(D,"opacity");n=w.getUniformLocation(D,"modelViewMatrix");p=w.getUniformLocation(D,"projectionMatrix");q= +w.getUniformLocation(D,"fogType");m=w.getUniformLocation(D,"fogDensity");r=w.getUniformLocation(D,"fogNear");t=w.getUniformLocation(D,"fogFar");s=w.getUniformLocation(D,"fogColor");u=w.getUniformLocation(D,"alphaTest");F=document.createElement("canvas");F.width=8;F.height=8;R=F.getContext("2d");R.fillStyle="white";R.fillRect(0,0,8,8);E=new THREE.Texture(F);E.needsUpdate=!0}w.useProgram(D);w.enableVertexAttribArray(v);w.enableVertexAttribArray(y);w.disable(w.CULL_FACE);w.enable(w.BLEND);w.bindBuffer(w.ARRAY_BUFFER, +K);w.vertexAttribPointer(v,2,w.FLOAT,!1,16,0);w.vertexAttribPointer(y,2,w.FLOAT,!1,16,8);w.bindBuffer(w.ELEMENT_ARRAY_BUFFER,x);w.uniformMatrix4fv(p,!1,B.projectionMatrix.elements);w.activeTexture(w.TEXTURE0);w.uniform1i(h,0);R=F=0;(H=A.fog)?(w.uniform3f(s,H.color.r,H.color.g,H.color.b),H instanceof THREE.Fog?(w.uniform1f(r,H.near),w.uniform1f(t,H.far),w.uniform1i(q,1),R=F=1):H instanceof THREE.FogExp2&&(w.uniform1f(m,H.density),w.uniform1i(q,2),R=F=2)):(w.uniform1i(q,0),R=F=0);for(var H=0,C=b.length;H< +C;H++){var T=b[H];T._modelViewMatrix.multiplyMatrices(B.matrixWorldInverse,T.matrixWorld);T.z=null===T.renderDepth?-T._modelViewMatrix.elements[14]:T.renderDepth}b.sort(G);for(var 
Q=[],H=0,C=b.length;Hq-1?0:q-1,r=q+1>e-1?e-1:q+1,t=0>p-1?0:p-1,s=p+1>d-1?d-1:p+1,u=[],v=[0,0,h[4*(q*d+p)]/255*b];u.push([-1,0,h[4*(q*d+t)]/255*b]);u.push([-1,-1,h[4*(m*d+t)]/255*b]);u.push([0,-1,h[4*(m*d+p)]/255*b]);u.push([1,-1,h[4*(m*d+s)]/255*b]);u.push([1,0,h[4*(q*d+s)]/255*b]);u.push([1,1,h[4*(r*d+s)]/255*b]);u.push([0,1,h[4*(r*d+p)]/255* +b]);u.push([-1,1,h[4*(r*d+t)]/255*b]);m=[];t=u.length;for(r=0;re)return null;var f=[],g=[],h=[],k,n,p;if(0=q--){console.log("Warning, unable to triangulate polygon!");break}k=n;e<=k&&(k=0);n=k+1;e<=n&&(n=0);p=n+1;e<=p&&(p=0);var m;a:{var r=m=void 0,t=void 0,s=void 0,u=void 0,v=void 0,y=void 0,G=void 0,w=void 0, +r=a[g[k]].x,t=a[g[k]].y,s=a[g[n]].x,u=a[g[n]].y,v=a[g[p]].x,y=a[g[p]].y;if(1E-10>(s-r)*(y-t)-(u-t)*(v-r))m=!1;else{var K=void 0,x=void 0,D=void 0,E=void 0,A=void 0,B=void 0,F=void 0,R=void 0,H=void 0,C=void 0,H=R=F=w=G=void 0,K=v-s,x=y-u,D=r-v,E=t-y,A=s-r,B=u-t;for(m=0;mk)g=d+1;else if(0b&&(b=0);1=b)return b=c[a]-b,a=this.curves[a],b=1-b/a.getLength(),a.getPointAt(b);a++}return null};THREE.CurvePath.prototype.getLength=function(){var a=this.getCurveLengths();return a[a.length-1]}; +THREE.CurvePath.prototype.getCurveLengths=function(){if(this.cacheLengths&&this.cacheLengths.length==this.curves.length)return this.cacheLengths;var a=[],b=0,c,d=this.curves.length;for(c=0;cb?b=h.x:h.xc?c=h.y:h.yd?d=h.z:h.zMath.abs(d.x-c[0].x)&&1E-10>Math.abs(d.y-c[0].y)&&c.splice(c.length-1,1);b&&c.push(c[0]);return c}; +THREE.Path.prototype.toShapes=function(a,b){function c(a){for(var b=[],c=0,d=a.length;cm&&(g=b[f],k=-k,h=b[e],m=-m),!(a.yh.y))if(a.y==g.y){if(a.x==g.x)return!0}else{e=m*(a.x-g.x)-k*(a.y-g.y);if(0==e)return!0;0>e||(d=!d)}}else if(a.y==g.y&&(h.x<=a.x&&a.x<=g.x||g.x<=a.x&&a.x<= +h.x))return!0}return d}var e=function(a){var b,c,d,e,f=[],g=new THREE.Path;b=0;for(c=a.length;bE||E>D)return[];k=n*p-k*q;if(0>k||k>D)return[]}else{if(0d?[]:k==d?f?[]:[g]:a<=d?[g,h]: +[g,n]}function e(a,b,c,d){var 
e=b.x-a.x,f=b.y-a.y;b=c.x-a.x;c=c.y-a.y;var g=d.x-a.x;d=d.y-a.y;a=e*c-f*b;e=e*d-f*g;return 1E-10f&&(f=d);var g=a+1;g>d&&(g=0);d=e(h[a],h[f],h[g],k[b]);if(!d)return!1; +d=k.length-1;f=b-1;0>f&&(f=d);g=b+1;g>d&&(g=0);return(d=e(k[b],k[f],k[g],h[a]))?!0:!1}function f(a,b){var c,e;for(c=0;cC){console.log("Infinite Loop! Holes left:"+ +n.length+", Probably Hole outside Shape!");break}for(q=B;qh;h++)n=k[h].x+":"+k[h].y, +n=p[n],void 0!==n&&(k[h]=n);return q.concat()},isClockWise:function(a){return 0>THREE.FontUtils.Triangulate.area(a)},b2p0:function(a,b){var c=1-a;return c*c*b},b2p1:function(a,b){return 2*(1-a)*a*b},b2p2:function(a,b){return a*a*b},b2:function(a,b,c,d){return this.b2p0(a,b)+this.b2p1(a,c)+this.b2p2(a,d)},b3p0:function(a,b){var c=1-a;return c*c*c*b},b3p1:function(a,b){var c=1-a;return 3*c*c*a*b},b3p2:function(a,b){return 3*(1-a)*a*a*b},b3p3:function(a,b){return a*a*a*b},b3:function(a,b,c,d,e){return this.b3p0(a, +b)+this.b3p1(a,c)+this.b3p2(a,d)+this.b3p3(a,e)}};THREE.LineCurve=function(a,b){this.v1=a;this.v2=b};THREE.LineCurve.prototype=Object.create(THREE.Curve.prototype);THREE.LineCurve.prototype.getPoint=function(a){var b=this.v2.clone().sub(this.v1);b.multiplyScalar(a).add(this.v1);return b};THREE.LineCurve.prototype.getPointAt=function(a){return this.getPoint(a)};THREE.LineCurve.prototype.getTangent=function(a){return this.v2.clone().sub(this.v1).normalize()}; +THREE.QuadraticBezierCurve=function(a,b,c){this.v0=a;this.v1=b;this.v2=c};THREE.QuadraticBezierCurve.prototype=Object.create(THREE.Curve.prototype);THREE.QuadraticBezierCurve.prototype.getPoint=function(a){var b=new THREE.Vector2;b.x=THREE.Shape.Utils.b2(a,this.v0.x,this.v1.x,this.v2.x);b.y=THREE.Shape.Utils.b2(a,this.v0.y,this.v1.y,this.v2.y);return b}; +THREE.QuadraticBezierCurve.prototype.getTangent=function(a){var b=new 
THREE.Vector2;b.x=THREE.Curve.Utils.tangentQuadraticBezier(a,this.v0.x,this.v1.x,this.v2.x);b.y=THREE.Curve.Utils.tangentQuadraticBezier(a,this.v0.y,this.v1.y,this.v2.y);return b.normalize()};THREE.CubicBezierCurve=function(a,b,c,d){this.v0=a;this.v1=b;this.v2=c;this.v3=d};THREE.CubicBezierCurve.prototype=Object.create(THREE.Curve.prototype); +THREE.CubicBezierCurve.prototype.getPoint=function(a){var b;b=THREE.Shape.Utils.b3(a,this.v0.x,this.v1.x,this.v2.x,this.v3.x);a=THREE.Shape.Utils.b3(a,this.v0.y,this.v1.y,this.v2.y,this.v3.y);return new THREE.Vector2(b,a)};THREE.CubicBezierCurve.prototype.getTangent=function(a){var b;b=THREE.Curve.Utils.tangentCubicBezier(a,this.v0.x,this.v1.x,this.v2.x,this.v3.x);a=THREE.Curve.Utils.tangentCubicBezier(a,this.v0.y,this.v1.y,this.v2.y,this.v3.y);b=new THREE.Vector2(b,a);b.normalize();return b}; +THREE.SplineCurve=function(a){this.points=void 0==a?[]:a};THREE.SplineCurve.prototype=Object.create(THREE.Curve.prototype);THREE.SplineCurve.prototype.getPoint=function(a){var b=this.points;a*=b.length-1;var c=Math.floor(a);a-=c;var d=b[0==c?c:c-1],e=b[c],f=b[c>b.length-2?b.length-1:c+1],b=b[c>b.length-3?b.length-1:c+2],c=new THREE.Vector2;c.x=THREE.Curve.Utils.interpolate(d.x,e.x,f.x,b.x,a);c.y=THREE.Curve.Utils.interpolate(d.y,e.y,f.y,b.y,a);return c}; +THREE.EllipseCurve=function(a,b,c,d,e,f,g){this.aX=a;this.aY=b;this.xRadius=c;this.yRadius=d;this.aStartAngle=e;this.aEndAngle=f;this.aClockwise=g};THREE.EllipseCurve.prototype=Object.create(THREE.Curve.prototype); +THREE.EllipseCurve.prototype.getPoint=function(a){var b=this.aEndAngle-this.aStartAngle;0>b&&(b+=2*Math.PI);b>2*Math.PI&&(b-=2*Math.PI);a=!0===this.aClockwise?this.aEndAngle+(1-a)*(2*Math.PI-b):this.aStartAngle+a*b;b=new THREE.Vector2;b.x=this.aX+this.xRadius*Math.cos(a);b.y=this.aY+this.yRadius*Math.sin(a);return b};THREE.ArcCurve=function(a,b,c,d,e,f){THREE.EllipseCurve.call(this,a,b,c,c,d,e,f)};THREE.ArcCurve.prototype=Object.create(THREE.EllipseCurve.prototype); 
+THREE.LineCurve3=THREE.Curve.create(function(a,b){this.v1=a;this.v2=b},function(a){var b=new THREE.Vector3;b.subVectors(this.v2,this.v1);b.multiplyScalar(a);b.add(this.v1);return b});THREE.QuadraticBezierCurve3=THREE.Curve.create(function(a,b,c){this.v0=a;this.v1=b;this.v2=c},function(a){var b=new THREE.Vector3;b.x=THREE.Shape.Utils.b2(a,this.v0.x,this.v1.x,this.v2.x);b.y=THREE.Shape.Utils.b2(a,this.v0.y,this.v1.y,this.v2.y);b.z=THREE.Shape.Utils.b2(a,this.v0.z,this.v1.z,this.v2.z);return b}); +THREE.CubicBezierCurve3=THREE.Curve.create(function(a,b,c,d){this.v0=a;this.v1=b;this.v2=c;this.v3=d},function(a){var b=new THREE.Vector3;b.x=THREE.Shape.Utils.b3(a,this.v0.x,this.v1.x,this.v2.x,this.v3.x);b.y=THREE.Shape.Utils.b3(a,this.v0.y,this.v1.y,this.v2.y,this.v3.y);b.z=THREE.Shape.Utils.b3(a,this.v0.z,this.v1.z,this.v2.z,this.v3.z);return b}); +THREE.SplineCurve3=THREE.Curve.create(function(a){this.points=void 0==a?[]:a},function(a){var b=this.points;a*=b.length-1;var c=Math.floor(a);a-=c;var d=b[0==c?c:c-1],e=b[c],f=b[c>b.length-2?b.length-1:c+1],b=b[c>b.length-3?b.length-1:c+2],c=new THREE.Vector3;c.x=THREE.Curve.Utils.interpolate(d.x,e.x,f.x,b.x,a);c.y=THREE.Curve.Utils.interpolate(d.y,e.y,f.y,b.y,a);c.z=THREE.Curve.Utils.interpolate(d.z,e.z,f.z,b.z,a);return c}); +THREE.ClosedSplineCurve3=THREE.Curve.create(function(a){this.points=void 0==a?[]:a},function(a){var b=this.points;a*=b.length-0;var c=Math.floor(a);a-=c;var c=c+(0a.hierarchy[b].keys[c].time&&(a.hierarchy[b].keys[c].time= +0),void 0!==a.hierarchy[b].keys[c].rot&&!(a.hierarchy[b].keys[c].rot instanceof THREE.Quaternion)){var d=a.hierarchy[b].keys[c].rot;a.hierarchy[b].keys[c].rot=(new THREE.Quaternion).fromArray(d)}if(a.hierarchy[b].keys.length&&void 0!==a.hierarchy[b].keys[0].morphTargets){d={};for(c=0;cd;d++){for(var e=this.keyTypes[d],f=this.data.hierarchy[a].keys[0],g=this.getNextKeyWith(e,a,1);g.timef.index;)f=g,g=this.getNextKeyWith(e,a,g.index+1);c.prevKey[e]=f;c.nextKey[e]=g}}}; 
+THREE.Animation.prototype.resetBlendWeights=function(){for(var a=0,b=this.hierarchy.length;aa.length-2?q:q+1;c[3]=q>a.length-3?q:q+2;q=a[c[0]];r=a[c[1]];t=a[c[2]];s=a[c[3]];c=e*e;m=e*c;d[0]=f(q[0],r[0],t[0],s[0],e,c,m);d[1]=f(q[1],r[1],t[1],s[1],e,c,m);d[2]=f(q[2],r[2],t[2],s[2],e,c,m);return d},f=function(a,b,c,d,e,f,m){a=.5*(c-a);d=.5*(d-b);return(2*(b-c)+a+d)*m+ +(-3*(b-c)-2*a-d)*f+a*e+b};return function(f){if(!1!==this.isPlaying&&(this.currentTime+=f*this.timeScale,0!==this.weight)){f=this.data.length;if(this.currentTime>f||0>this.currentTime)if(this.loop)this.currentTime%=f,0>this.currentTime&&(this.currentTime+=f),this.reset();else{this.stop();return}f=0;for(var h=this.hierarchy.length;fq;q++){var m=this.keyTypes[q],r=n.prevKey[m],t=n.nextKey[m]; +if(0this.timeScale&&r.time>=this.currentTime){r=this.data.hierarchy[f].keys[0];for(t=this.getNextKeyWith(m,f,1);t.timer.index;)r=t,t=this.getNextKeyWith(m,f,t.index+1);n.prevKey[m]=r;n.nextKey[m]=t}k.matrixAutoUpdate=!0;k.matrixWorldNeedsUpdate=!0;var s=(this.currentTime-r.time)/(t.time-r.time),u=r[m],v=t[m];0>s&&(s=0);1a&&(this.currentTime%=a);this.currentTime=Math.min(this.currentTime,a);a=0;for(var b=this.hierarchy.length;af.index;)f=g,g=e[f.index+1];d.prevKey= +f;d.nextKey=g}g.time>=this.currentTime?f.interpolate(g,this.currentTime):f.interpolate(g,g.time);this.data.hierarchy[a].node.updateMatrix();c.matrixWorldNeedsUpdate=!0}}}};THREE.KeyFrameAnimation.prototype.getNextKeyWith=function(a,b,c){b=this.data.hierarchy[b].keys;for(c%=b.length;cthis.duration&&(this.currentTime%=this.duration);this.currentTime=Math.min(this.currentTime,this.duration);c=this.duration/this.frames;var d=Math.floor(this.currentTime/c);d!=b&&(this.mesh.morphTargetInfluences[a]=0,this.mesh.morphTargetInfluences[b]=1,this.mesh.morphTargetInfluences[d]= +0,a=b,b=d);this.mesh.morphTargetInfluences[d]=this.currentTime%c/c;this.mesh.morphTargetInfluences[a]=1-this.mesh.morphTargetInfluences[d]}}}()}; 
+THREE.BoxGeometry=function(a,b,c,d,e,f){function g(a,b,c,d,e,f,g,s){var u,v=h.widthSegments,y=h.heightSegments,G=e/2,w=f/2,K=h.vertices.length;if("x"===a&&"y"===b||"y"===a&&"x"===b)u="z";else if("x"===a&&"z"===b||"z"===a&&"x"===b)u="y",y=h.depthSegments;else if("z"===a&&"y"===b||"y"===a&&"z"===b)u="x",v=h.depthSegments;var x=v+1,D=y+1,E=e/v,A=f/y,B=new THREE.Vector3;B[u]=0=d)return new THREE.Vector2(c,a);d=Math.sqrt(d/2)}else a=!1,1E-10d?-1E-10>f&&(a=!0):Math.sign(e)== +Math.sign(g)&&(a=!0),a?(c=-e,a=d,d=Math.sqrt(h)):(c=d,a=e,d=Math.sqrt(h/2));return new THREE.Vector2(c/d,a/d)}function e(a,b){var c,d;for(P=a.length;0<=--P;){c=P;d=P-1;0>d&&(d=a.length-1);for(var e=0,f=r+2*p,e=0;eMath.abs(b.y-c.y)?[new THREE.Vector2(b.x,1-b.z),new THREE.Vector2(c.x,1-c.z),new THREE.Vector2(d.x,1-d.z),new THREE.Vector2(e.x,1-e.z)]:[new THREE.Vector2(b.y,1-b.z),new THREE.Vector2(c.y,1-c.z),new THREE.Vector2(d.y, +1-d.z),new THREE.Vector2(e.y,1-e.z)]}};THREE.ShapeGeometry=function(a,b){THREE.Geometry.call(this);this.type="ShapeGeometry";!1===a instanceof Array&&(a=[a]);this.addShapeList(a,b);this.computeFaceNormals()};THREE.ShapeGeometry.prototype=Object.create(THREE.Geometry.prototype);THREE.ShapeGeometry.prototype.addShapeList=function(a,b){for(var c=0,d=a.length;cc&&1===a.x&&(a=new THREE.Vector2(a.x-1,a.y));0===b.x&&0===b.z&&(a=new THREE.Vector2(c/2/Math.PI+.5, +a.y));return a.clone()}THREE.Geometry.call(this);this.type="PolyhedronGeometry";this.parameters={vertices:a,indices:b,radius:c,detail:d};c=c||1;d=d||0;for(var k=this,n=0,p=a.length;nr&&(.2>d&&(b[0].x+=1),.2>a&&(b[1].x+=1),.2>q&&(b[2].x+=1));n=0;for(p=this.vertices.length;nc.y?this.quaternion.set(1,0,0,0):(a.set(c.z,0,-c.x).normalize(),b=Math.acos(c.y),this.quaternion.setFromAxisAngle(a,b))}}(); +THREE.ArrowHelper.prototype.setLength=function(a,b,c){void 0===b&&(b=.2*a);void 
0===c&&(c=.2*b);this.line.scale.set(1,a,1);this.line.updateMatrix();this.cone.scale.set(c,b,c);this.cone.position.y=a;this.cone.updateMatrix()};THREE.ArrowHelper.prototype.setColor=function(a){this.line.material.color.set(a);this.cone.material.color.set(a)}; +THREE.BoxHelper=function(a){var b=new THREE.BufferGeometry;b.addAttribute("position",new THREE.BufferAttribute(new Float32Array(72),3));THREE.Line.call(this,b,new THREE.LineBasicMaterial({color:16776960}),THREE.LinePieces);void 0!==a&&this.update(a)};THREE.BoxHelper.prototype=Object.create(THREE.Line.prototype); +THREE.BoxHelper.prototype.update=function(a){var b=a.geometry;null===b.boundingBox&&b.computeBoundingBox();var c=b.boundingBox.min,b=b.boundingBox.max,d=this.geometry.attributes.position.array;d[0]=b.x;d[1]=b.y;d[2]=b.z;d[3]=c.x;d[4]=b.y;d[5]=b.z;d[6]=c.x;d[7]=b.y;d[8]=b.z;d[9]=c.x;d[10]=c.y;d[11]=b.z;d[12]=c.x;d[13]=c.y;d[14]=b.z;d[15]=b.x;d[16]=c.y;d[17]=b.z;d[18]=b.x;d[19]=c.y;d[20]=b.z;d[21]=b.x;d[22]=b.y;d[23]=b.z;d[24]=b.x;d[25]=b.y;d[26]=c.z;d[27]=c.x;d[28]=b.y;d[29]=c.z;d[30]=c.x;d[31]=b.y; +d[32]=c.z;d[33]=c.x;d[34]=c.y;d[35]=c.z;d[36]=c.x;d[37]=c.y;d[38]=c.z;d[39]=b.x;d[40]=c.y;d[41]=c.z;d[42]=b.x;d[43]=c.y;d[44]=c.z;d[45]=b.x;d[46]=b.y;d[47]=c.z;d[48]=b.x;d[49]=b.y;d[50]=b.z;d[51]=b.x;d[52]=b.y;d[53]=c.z;d[54]=c.x;d[55]=b.y;d[56]=b.z;d[57]=c.x;d[58]=b.y;d[59]=c.z;d[60]=c.x;d[61]=c.y;d[62]=b.z;d[63]=c.x;d[64]=c.y;d[65]=c.z;d[66]=b.x;d[67]=c.y;d[68]=b.z;d[69]=b.x;d[70]=c.y;d[71]=c.z;this.geometry.attributes.position.needsUpdate=!0;this.geometry.computeBoundingSphere();this.matrix=a.matrixWorld; +this.matrixAutoUpdate=!1};THREE.BoundingBoxHelper=function(a,b){var c=void 0!==b?b:8947848;this.object=a;this.box=new THREE.Box3;THREE.Mesh.call(this,new THREE.BoxGeometry(1,1,1),new 
THREE.MeshBasicMaterial({color:c,wireframe:!0}))};THREE.BoundingBoxHelper.prototype=Object.create(THREE.Mesh.prototype);THREE.BoundingBoxHelper.prototype.update=function(){this.box.setFromObject(this.object);this.box.size(this.scale);this.box.center(this.position)}; +THREE.CameraHelper=function(a){function b(a,b,d){c(a,d);c(b,d)}function c(a,b){d.vertices.push(new THREE.Vector3);d.colors.push(new THREE.Color(b));void 0===f[a]&&(f[a]=[]);f[a].push(d.vertices.length-1)}var d=new THREE.Geometry,e=new THREE.LineBasicMaterial({color:16777215,vertexColors:THREE.FaceColors}),f={};b("n1","n2",16755200);b("n2","n4",16755200);b("n4","n3",16755200);b("n3","n1",16755200);b("f1","f2",16755200);b("f2","f4",16755200);b("f4","f3",16755200);b("f3","f1",16755200);b("n1","f1",16755200); +b("n2","f2",16755200);b("n3","f3",16755200);b("n4","f4",16755200);b("p","n1",16711680);b("p","n2",16711680);b("p","n3",16711680);b("p","n4",16711680);b("u1","u2",43775);b("u2","u3",43775);b("u3","u1",43775);b("c","t",16777215);b("p","c",3355443);b("cn1","cn2",3355443);b("cn3","cn4",3355443);b("cf1","cf2",3355443);b("cf3","cf4",3355443);THREE.Line.call(this,d,e,THREE.LinePieces);this.camera=a;this.matrix=a.matrixWorld;this.matrixAutoUpdate=!1;this.pointMap=f;this.update()}; +THREE.CameraHelper.prototype=Object.create(THREE.Line.prototype); +THREE.CameraHelper.prototype.update=function(){var a,b,c=new THREE.Vector3,d=new THREE.Camera,e=function(e,g,h,k){c.set(g,h,k).unproject(d);e=b[e];if(void 0!==e)for(g=0,h=e.length;gt;t++){d[0]=r[g[t]];d[1]=r[g[(t+1)%3]];d.sort(f);var s=d.toString();void 0===e[s]?(e[s]={vert1:d[0],vert2:d[1],face1:q,face2:void 0},p++):e[s].face2=q}d=new Float32Array(6*p);f=0;for(s in e)if(g=e[s],void 0===g.face2|| +.9999>k[g.face1].normal.dot(k[g.face2].normal))p=n[g.vert1],d[f++]=p.x,d[f++]=p.y,d[f++]=p.z,p=n[g.vert2],d[f++]=p.x,d[f++]=p.y,d[f++]=p.z;h.addAttribute("position",new THREE.BufferAttribute(d,3));THREE.Line.call(this,h,new 
THREE.LineBasicMaterial({color:c}),THREE.LinePieces);this.matrix=a.matrixWorld;this.matrixAutoUpdate=!1};THREE.EdgesHelper.prototype=Object.create(THREE.Line.prototype); +THREE.FaceNormalsHelper=function(a,b,c,d){this.object=a;this.size=void 0!==b?b:1;a=void 0!==c?c:16776960;d=void 0!==d?d:1;b=new THREE.Geometry;c=0;for(var e=this.object.geometry.faces.length;cb;b++)a.faces[b].color=this.colors[4>b?0:1];b=new THREE.MeshBasicMaterial({vertexColors:THREE.FaceColors,wireframe:!0});this.lightSphere=new THREE.Mesh(a,b);this.add(this.lightSphere); +this.update()};THREE.HemisphereLightHelper.prototype=Object.create(THREE.Object3D.prototype);THREE.HemisphereLightHelper.prototype.dispose=function(){this.lightSphere.geometry.dispose();this.lightSphere.material.dispose()}; +THREE.HemisphereLightHelper.prototype.update=function(){var a=new THREE.Vector3;return function(){this.colors[0].copy(this.light.color).multiplyScalar(this.light.intensity);this.colors[1].copy(this.light.groundColor).multiplyScalar(this.light.intensity);this.lightSphere.lookAt(a.setFromMatrixPosition(this.light.matrixWorld).negate());this.lightSphere.geometry.colorsNeedUpdate=!0}}(); +THREE.PointLightHelper=function(a,b){this.light=a;this.light.updateMatrixWorld();var c=new THREE.SphereGeometry(b,4,2),d=new THREE.MeshBasicMaterial({wireframe:!0,fog:!1});d.color.copy(this.light.color).multiplyScalar(this.light.intensity);THREE.Mesh.call(this,c,d);this.matrix=this.light.matrixWorld;this.matrixAutoUpdate=!1};THREE.PointLightHelper.prototype=Object.create(THREE.Mesh.prototype);THREE.PointLightHelper.prototype.dispose=function(){this.geometry.dispose();this.material.dispose()}; +THREE.PointLightHelper.prototype.update=function(){this.material.color.copy(this.light.color).multiplyScalar(this.light.intensity)}; +THREE.SkeletonHelper=function(a){this.bones=this.getBoneList(a);for(var b=new THREE.Geometry,c=0;cs;s++){d[0]=t[g[s]];d[1]=t[g[(s+1)%3]];d.sort(f);var u=d.toString();void 
0===e[u]&&(q[2*p]=d[0],q[2*p+1]=d[1],e[u]=!0,p++)}d=new Float32Array(6*p);m=0;for(r=p;ms;s++)p= +k[q[2*m+s]],g=6*m+3*s,d[g+0]=p.x,d[g+1]=p.y,d[g+2]=p.z;h.addAttribute("position",new THREE.BufferAttribute(d,3))}else if(a.geometry instanceof THREE.BufferGeometry){if(void 0!==a.geometry.attributes.index){k=a.geometry.attributes.position.array;r=a.geometry.attributes.index.array;n=a.geometry.drawcalls;p=0;0===n.length&&(n=[{count:r.length,index:0,start:0}]);for(var q=new Uint32Array(2*r.length),t=0,v=n.length;ts;s++)d[0]= +g+r[m+s],d[1]=g+r[m+(s+1)%3],d.sort(f),u=d.toString(),void 0===e[u]&&(q[2*p]=d[0],q[2*p+1]=d[1],e[u]=!0,p++);d=new Float32Array(6*p);m=0;for(r=p;ms;s++)g=6*m+3*s,p=3*q[2*m+s],d[g+0]=k[p],d[g+1]=k[p+1],d[g+2]=k[p+2]}else for(k=a.geometry.attributes.position.array,p=k.length/3,q=p/3,d=new Float32Array(6*p),m=0,r=q;ms;s++)g=18*m+6*s,q=9*m+3*s,d[g+0]=k[q],d[g+1]=k[q+1],d[g+2]=k[q+2],p=9*m+(s+1)%3*3,d[g+3]=k[p],d[g+4]=k[p+1],d[g+5]=k[p+2];h.addAttribute("position",new THREE.BufferAttribute(d, +3))}THREE.Line.call(this,h,new THREE.LineBasicMaterial({color:c}),THREE.LinePieces);this.matrix=a.matrixWorld;this.matrixAutoUpdate=!1};THREE.WireframeHelper.prototype=Object.create(THREE.Line.prototype);THREE.ImmediateRenderObject=function(){THREE.Object3D.call(this);this.render=function(a){}};THREE.ImmediateRenderObject.prototype=Object.create(THREE.Object3D.prototype); +THREE.MorphBlendMesh=function(a,b){THREE.Mesh.call(this,a,b);this.animationsMap={};this.animationsList=[];var c=this.geometry.morphTargets.length;this.createAnimation("__default",0,c-1,c/1);this.setAnimationWeight("__default",1)};THREE.MorphBlendMesh.prototype=Object.create(THREE.Mesh.prototype); +THREE.MorphBlendMesh.prototype.createAnimation=function(a,b,c,d){b={startFrame:b,endFrame:c,length:c-b+1,fps:d,duration:(c-b)/d,lastFrame:0,currentFrame:0,active:!1,time:0,direction:1,weight:1,directionBackwards:!1,mirroredLoop:!1};this.animationsMap[a]=b;this.animationsList.push(b)}; 
+THREE.MorphBlendMesh.prototype.autoCreateAnimations=function(a){for(var b=/([a-z]+)_?(\d+)/,c,d={},e=this.geometry,f=0,g=e.morphTargets.length;fh.end&&(h.end=f);c||(c=k)}}for(k in d)h=d[k],this.createAnimation(k,h.start,h.end,a);this.firstAnimation=c}; +THREE.MorphBlendMesh.prototype.setAnimationDirectionForward=function(a){if(a=this.animationsMap[a])a.direction=1,a.directionBackwards=!1};THREE.MorphBlendMesh.prototype.setAnimationDirectionBackward=function(a){if(a=this.animationsMap[a])a.direction=-1,a.directionBackwards=!0};THREE.MorphBlendMesh.prototype.setAnimationFPS=function(a,b){var c=this.animationsMap[a];c&&(c.fps=b,c.duration=(c.end-c.start)/c.fps)}; +THREE.MorphBlendMesh.prototype.setAnimationDuration=function(a,b){var c=this.animationsMap[a];c&&(c.duration=b,c.fps=(c.end-c.start)/c.duration)};THREE.MorphBlendMesh.prototype.setAnimationWeight=function(a,b){var c=this.animationsMap[a];c&&(c.weight=b)};THREE.MorphBlendMesh.prototype.setAnimationTime=function(a,b){var c=this.animationsMap[a];c&&(c.time=b)};THREE.MorphBlendMesh.prototype.getAnimationTime=function(a){var b=0;if(a=this.animationsMap[a])b=a.time;return b}; +THREE.MorphBlendMesh.prototype.getAnimationDuration=function(a){var b=-1;if(a=this.animationsMap[a])b=a.duration;return b};THREE.MorphBlendMesh.prototype.playAnimation=function(a){var b=this.animationsMap[a];b?(b.time=0,b.active=!0):console.warn("animation["+a+"] undefined")};THREE.MorphBlendMesh.prototype.stopAnimation=function(a){if(a=this.animationsMap[a])a.active=!1}; +THREE.MorphBlendMesh.prototype.update=function(a){for(var b=0,c=this.animationsList.length;bd.duration||0>d.time)d.direction*=-1,d.time>d.duration&&(d.time=d.duration,d.directionBackwards=!0),0>d.time&&(d.time=0,d.directionBackwards=!1)}else d.time%=d.duration,0>d.time&&(d.time+=d.duration);var f=d.startFrame+THREE.Math.clamp(Math.floor(d.time/e),0,d.length-1),g=d.weight; 
+f!==d.currentFrame&&(this.morphTargetInfluences[d.lastFrame]=0,this.morphTargetInfluences[d.currentFrame]=1*g,this.morphTargetInfluences[f]=0,d.lastFrame=d.currentFrame,d.currentFrame=f);e=d.time%e/e;d.directionBackwards&&(e=1-e);this.morphTargetInfluences[d.currentFrame]=e*g;this.morphTargetInfluences[d.lastFrame]=(1-e)*g}}}; diff --git a/plugins/Sidebar/media-globe/globe.js b/plugins/Sidebar/media-globe/globe.js new file mode 100644 index 00000000..a6ed358a --- /dev/null +++ b/plugins/Sidebar/media-globe/globe.js @@ -0,0 +1,424 @@ +/** + * dat.globe Javascript WebGL Globe Toolkit + * http://dataarts.github.com/dat.globe + * + * Copyright 2011 Data Arts Team, Google Creative Lab + * + * Licensed under the Apache License, Version 2.0 (the 'License'); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ + +var DAT = DAT || {}; + +DAT.Globe = function(container, opts) { + opts = opts || {}; + + var colorFn = opts.colorFn || function(x) { + var c = new THREE.Color(); + c.setHSL( ( 0.5 - (x * 2) ), Math.max(0.8, 1.0 - (x * 3)), 0.5 ); + return c; + }; + var imgDir = opts.imgDir || '/globe/'; + + var Shaders = { + 'earth' : { + uniforms: { + 'texture': { type: 't', value: null } + }, + vertexShader: [ + 'varying vec3 vNormal;', + 'varying vec2 vUv;', + 'void main() {', + 'gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );', + 'vNormal = normalize( normalMatrix * normal );', + 'vUv = uv;', + '}' + ].join('\n'), + fragmentShader: [ + 'uniform sampler2D texture;', + 'varying vec3 vNormal;', + 'varying vec2 vUv;', + 'void main() {', + 'vec3 diffuse = texture2D( texture, vUv ).xyz;', + 'float intensity = 1.05 - dot( vNormal, vec3( 0.0, 0.0, 1.0 ) );', + 'vec3 atmosphere = vec3( 1.0, 1.0, 1.0 ) * pow( intensity, 3.0 );', + 'gl_FragColor = vec4( diffuse + atmosphere, 1.0 );', + '}' + ].join('\n') + }, + 'atmosphere' : { + uniforms: {}, + 
vertexShader: [ + 'varying vec3 vNormal;', + 'void main() {', + 'vNormal = normalize( normalMatrix * normal );', + 'gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );', + '}' + ].join('\n'), + fragmentShader: [ + 'varying vec3 vNormal;', + 'void main() {', + 'float intensity = pow( 0.8 - dot( vNormal, vec3( 0, 0, 1.0 ) ), 12.0 );', + 'gl_FragColor = vec4( 1.0, 1.0, 1.0, 1.0 ) * intensity;', + '}' + ].join('\n') + } + }; + + var camera, scene, renderer, w, h; + var mesh, atmosphere, point, running; + + var overRenderer; + var running = true; + + var curZoomSpeed = 0; + var zoomSpeed = 50; + + var mouse = { x: 0, y: 0 }, mouseOnDown = { x: 0, y: 0 }; + var rotation = { x: 0, y: 0 }, + target = { x: Math.PI*3/2, y: Math.PI / 6.0 }, + targetOnDown = { x: 0, y: 0 }; + + var distance = 100000, distanceTarget = 100000; + var padding = 10; + var PI_HALF = Math.PI / 2; + + function init() { + + container.style.color = '#fff'; + container.style.font = '13px/20px Arial, sans-serif'; + + var shader, uniforms, material; + w = container.offsetWidth || window.innerWidth; + h = container.offsetHeight || window.innerHeight; + + camera = new THREE.PerspectiveCamera(30, w / h, 1, 10000); + camera.position.z = distance; + + scene = new THREE.Scene(); + + var geometry = new THREE.SphereGeometry(200, 40, 30); + + shader = Shaders['earth']; + uniforms = THREE.UniformsUtils.clone(shader.uniforms); + + uniforms['texture'].value = THREE.ImageUtils.loadTexture(imgDir+'world.jpg'); + + material = new THREE.ShaderMaterial({ + + uniforms: uniforms, + vertexShader: shader.vertexShader, + fragmentShader: shader.fragmentShader + + }); + + mesh = new THREE.Mesh(geometry, material); + mesh.rotation.y = Math.PI; + scene.add(mesh); + + shader = Shaders['atmosphere']; + uniforms = THREE.UniformsUtils.clone(shader.uniforms); + + material = new THREE.ShaderMaterial({ + + uniforms: uniforms, + vertexShader: shader.vertexShader, + fragmentShader: shader.fragmentShader, + side: 
THREE.BackSide, + blending: THREE.AdditiveBlending, + transparent: true + + }); + + mesh = new THREE.Mesh(geometry, material); + mesh.scale.set( 1.1, 1.1, 1.1 ); + scene.add(mesh); + + geometry = new THREE.BoxGeometry(2.75, 2.75, 1); + geometry.applyMatrix(new THREE.Matrix4().makeTranslation(0,0,-0.5)); + + point = new THREE.Mesh(geometry); + + renderer = new THREE.WebGLRenderer({antialias: true}); + renderer.setSize(w, h); + renderer.setClearColor( 0x212121, 1 ); + + renderer.domElement.style.position = 'relative'; + + container.appendChild(renderer.domElement); + + container.addEventListener('mousedown', onMouseDown, false); + + container.addEventListener('mousewheel', onMouseWheel, false); + + document.addEventListener('keydown', onDocumentKeyDown, false); + + window.addEventListener('resize', onWindowResize, false); + + container.addEventListener('mouseover', function() { + overRenderer = true; + }, false); + + container.addEventListener('mouseout', function() { + overRenderer = false; + }, false); + } + + function addData(data, opts) { + var lat, lng, size, color, i, step, colorFnWrapper; + + opts.animated = opts.animated || false; + this.is_animated = opts.animated; + opts.format = opts.format || 'magnitude'; // other option is 'legend' + if (opts.format === 'magnitude') { + step = 3; + colorFnWrapper = function(data, i) { return colorFn(data[i+2]); } + } else if (opts.format === 'legend') { + step = 4; + colorFnWrapper = function(data, i) { return colorFn(data[i+3]); } + } else if (opts.format === 'peer') { + colorFnWrapper = function(data, i) { return colorFn(data[i+2]); } + } else { + throw('error: format not supported: '+opts.format); + } + + if (opts.animated) { + if (this._baseGeometry === undefined) { + this._baseGeometry = new THREE.Geometry(); + for (i = 0; i < data.length; i += step) { + lat = data[i]; + lng = data[i + 1]; +// size = data[i + 2]; + color = colorFnWrapper(data,i); + size = 0; + addPoint(lat, lng, size, color, this._baseGeometry); + } 
+ } + if(this._morphTargetId === undefined) { + this._morphTargetId = 0; + } else { + this._morphTargetId += 1; + } + opts.name = opts.name || 'morphTarget'+this._morphTargetId; + } + var subgeo = new THREE.Geometry(); + for (i = 0; i < data.length; i += step) { + lat = data[i]; + lng = data[i + 1]; + color = colorFnWrapper(data,i); + size = data[i + 2]; + size = size*200; + addPoint(lat, lng, size, color, subgeo); + } + if (opts.animated) { + this._baseGeometry.morphTargets.push({'name': opts.name, vertices: subgeo.vertices}); + } else { + this._baseGeometry = subgeo; + } + + }; + + function createPoints() { + if (this._baseGeometry !== undefined) { + if (this.is_animated === false) { + this.points = new THREE.Mesh(this._baseGeometry, new THREE.MeshBasicMaterial({ + color: 0xffffff, + vertexColors: THREE.FaceColors, + morphTargets: false + })); + } else { + if (this._baseGeometry.morphTargets.length < 8) { + console.log('t l',this._baseGeometry.morphTargets.length); + var padding = 8-this._baseGeometry.morphTargets.length; + console.log('padding', padding); + for(var i=0; i<=padding; i++) { + console.log('padding',i); + this._baseGeometry.morphTargets.push({'name': 'morphPadding'+i, vertices: this._baseGeometry.vertices}); + } + } + this.points = new THREE.Mesh(this._baseGeometry, new THREE.MeshBasicMaterial({ + color: 0xffffff, + vertexColors: THREE.FaceColors, + morphTargets: true + })); + } + scene.add(this.points); + } + } + + function addPoint(lat, lng, size, color, subgeo) { + + var phi = (90 - lat) * Math.PI / 180; + var theta = (180 - lng) * Math.PI / 180; + + point.position.x = 200 * Math.sin(phi) * Math.cos(theta); + point.position.y = 200 * Math.cos(phi); + point.position.z = 200 * Math.sin(phi) * Math.sin(theta); + + point.lookAt(mesh.position); + + point.scale.z = Math.max( size, 0.1 ); // avoid non-invertible matrix + point.updateMatrix(); + + for (var i = 0; i < point.geometry.faces.length; i++) { + + point.geometry.faces[i].color = color; + + } + 
if(point.matrixAutoUpdate){ + point.updateMatrix(); + } + subgeo.merge(point.geometry, point.matrix); + } + + function onMouseDown(event) { + event.preventDefault(); + + container.addEventListener('mousemove', onMouseMove, false); + container.addEventListener('mouseup', onMouseUp, false); + container.addEventListener('mouseout', onMouseOut, false); + + mouseOnDown.x = - event.clientX; + mouseOnDown.y = event.clientY; + + targetOnDown.x = target.x; + targetOnDown.y = target.y; + + container.style.cursor = 'move'; + } + + function onMouseMove(event) { + mouse.x = - event.clientX; + mouse.y = event.clientY; + + var zoomDamp = distance/1000; + + target.x = targetOnDown.x + (mouse.x - mouseOnDown.x) * 0.005 * zoomDamp; + target.y = targetOnDown.y + (mouse.y - mouseOnDown.y) * 0.005 * zoomDamp; + + target.y = target.y > PI_HALF ? PI_HALF : target.y; + target.y = target.y < - PI_HALF ? - PI_HALF : target.y; + } + + function onMouseUp(event) { + container.removeEventListener('mousemove', onMouseMove, false); + container.removeEventListener('mouseup', onMouseUp, false); + container.removeEventListener('mouseout', onMouseOut, false); + container.style.cursor = 'auto'; + } + + function onMouseOut(event) { + container.removeEventListener('mousemove', onMouseMove, false); + container.removeEventListener('mouseup', onMouseUp, false); + container.removeEventListener('mouseout', onMouseOut, false); + } + + function onMouseWheel(event) { + event.preventDefault(); + if (overRenderer) { + zoom(event.wheelDeltaY * 0.3); + } + return false; + } + + function onDocumentKeyDown(event) { + switch (event.keyCode) { + case 38: + zoom(100); + event.preventDefault(); + break; + case 40: + zoom(-100); + event.preventDefault(); + break; + } + } + + function onWindowResize( event ) { + camera.aspect = container.offsetWidth / container.offsetHeight; + camera.updateProjectionMatrix(); + renderer.setSize( container.offsetWidth, container.offsetHeight ); + } + + function zoom(delta) { + 
distanceTarget -= delta; + distanceTarget = distanceTarget > 855 ? 855 : distanceTarget; + distanceTarget = distanceTarget < 350 ? 350 : distanceTarget; + } + + function animate() { + if (!running) return + requestAnimationFrame(animate); + render(); + } + + function render() { + zoom(curZoomSpeed); + + rotation.x += (target.x - rotation.x) * 0.1; + rotation.y += (target.y - rotation.y) * 0.1; + distance += (distanceTarget - distance) * 0.3; + + camera.position.x = distance * Math.sin(rotation.x) * Math.cos(rotation.y); + camera.position.y = distance * Math.sin(rotation.y); + camera.position.z = distance * Math.cos(rotation.x) * Math.cos(rotation.y); + + camera.lookAt(mesh.position); + + renderer.render(scene, camera); + } + + function unload() { + running = false + container.removeEventListener('mousedown', onMouseDown, false); + container.removeEventListener('mousewheel', onMouseWheel, false); + document.removeEventListener('keydown', onDocumentKeyDown, false); + window.removeEventListener('resize', onWindowResize, false); + + } + + init(); + this.animate = animate; + this.unload = unload; + + + this.__defineGetter__('time', function() { + return this._time || 0; + }); + + this.__defineSetter__('time', function(t) { + var validMorphs = []; + var morphDict = this.points.morphTargetDictionary; + for(var k in morphDict) { + if(k.indexOf('morphPadding') < 0) { + validMorphs.push(morphDict[k]); + } + } + validMorphs.sort(); + var l = validMorphs.length-1; + var scaledt = t*l+1; + var index = Math.floor(scaledt); + for (i=0;i= 0) { + this.points.morphTargetInfluences[lastIndex] = 1 - leftover; + } + this.points.morphTargetInfluences[index] = leftover; + this._time = t; + }); + + this.addData = addData; + this.createPoints = createPoints; + this.renderer = renderer; + this.scene = scene; + + return this; + +}; + diff --git a/plugins/Sidebar/media-globe/three.min.js b/plugins/Sidebar/media-globe/three.min.js new file mode 100644 index 00000000..a88b4afa --- /dev/null +++ 
b/plugins/Sidebar/media-globe/three.min.js @@ -0,0 +1,814 @@ +// threejs.org/license +'use strict';var THREE={REVISION:"69"};"object"===typeof module&&(module.exports=THREE);void 0===Math.sign&&(Math.sign=function(a){return 0>a?-1:0>16&255)/255;this.g=(a>>8&255)/255;this.b=(a&255)/255;return this},setRGB:function(a,b,c){this.r=a;this.g=b;this.b=c;return this},setHSL:function(a,b,c){if(0===b)this.r=this.g=this.b=c;else{var d=function(a,b,c){0>c&&(c+=1);1c?b:c<2/3?a+6*(b-a)*(2/3-c):a};b=.5>=c?c*(1+b):c+b-c*b;c=2*c-b;this.r=d(c,b,a+1/3);this.g=d(c,b,a);this.b=d(c,b,a-1/3)}return this},setStyle:function(a){if(/^rgb\((\d+), ?(\d+), ?(\d+)\)$/i.test(a))return a=/^rgb\((\d+), ?(\d+), ?(\d+)\)$/i.exec(a),this.r=Math.min(255,parseInt(a[1],10))/255,this.g=Math.min(255,parseInt(a[2],10))/255,this.b=Math.min(255,parseInt(a[3],10))/255,this;if(/^rgb\((\d+)\%, ?(\d+)\%, ?(\d+)\%\)$/i.test(a))return a=/^rgb\((\d+)\%, ?(\d+)\%, ?(\d+)\%\)$/i.exec(a),this.r= +Math.min(100,parseInt(a[1],10))/100,this.g=Math.min(100,parseInt(a[2],10))/100,this.b=Math.min(100,parseInt(a[3],10))/100,this;if(/^\#([0-9a-f]{6})$/i.test(a))return a=/^\#([0-9a-f]{6})$/i.exec(a),this.setHex(parseInt(a[1],16)),this;if(/^\#([0-9a-f])([0-9a-f])([0-9a-f])$/i.test(a))return a=/^\#([0-9a-f])([0-9a-f])([0-9a-f])$/i.exec(a),this.setHex(parseInt(a[1]+a[1]+a[2]+a[2]+a[3]+a[3],16)),this;if(/^(\w+)$/i.test(a))return this.setHex(THREE.ColorKeywords[a]),this},copy:function(a){this.r=a.r;this.g= +a.g;this.b=a.b;return this},copyGammaToLinear:function(a){this.r=a.r*a.r;this.g=a.g*a.g;this.b=a.b*a.b;return this},copyLinearToGamma:function(a){this.r=Math.sqrt(a.r);this.g=Math.sqrt(a.g);this.b=Math.sqrt(a.b);return this},convertGammaToLinear:function(){var a=this.r,b=this.g,c=this.b;this.r=a*a;this.g=b*b;this.b=c*c;return this},convertLinearToGamma:function(){this.r=Math.sqrt(this.r);this.g=Math.sqrt(this.g);this.b=Math.sqrt(this.b);return this},getHex:function(){return 255*this.r<<16^255*this.g<< 
+8^255*this.b<<0},getHexString:function(){return("000000"+this.getHex().toString(16)).slice(-6)},getHSL:function(a){a=a||{h:0,s:0,l:0};var b=this.r,c=this.g,d=this.b,e=Math.max(b,c,d),f=Math.min(b,c,d),g,h=(f+e)/2;if(f===e)f=g=0;else{var k=e-f,f=.5>=h?k/(e+f):k/(2-e-f);switch(e){case b:g=(c-d)/k+(cf&&c>b?(c=2*Math.sqrt(1+c-f-b),this._w=(k-g)/c,this._x=.25*c,this._y=(a+e)/c,this._z=(d+h)/c):f>b?(c=2*Math.sqrt(1+f-c-b),this._w=(d-h)/c,this._x=(a+e)/c,this._y= +.25*c,this._z=(g+k)/c):(c=2*Math.sqrt(1+b-c-f),this._w=(e-a)/c,this._x=(d+h)/c,this._y=(g+k)/c,this._z=.25*c);this.onChangeCallback();return this},setFromUnitVectors:function(){var a,b;return function(c,d){void 0===a&&(a=new THREE.Vector3);b=c.dot(d)+1;1E-6>b?(b=0,Math.abs(c.x)>Math.abs(c.z)?a.set(-c.y,c.x,0):a.set(0,-c.z,c.y)):a.crossVectors(c,d);this._x=a.x;this._y=a.y;this._z=a.z;this._w=b;this.normalize();return this}}(),inverse:function(){this.conjugate().normalize();return this},conjugate:function(){this._x*= +-1;this._y*=-1;this._z*=-1;this.onChangeCallback();return this},dot:function(a){return this._x*a._x+this._y*a._y+this._z*a._z+this._w*a._w},lengthSq:function(){return this._x*this._x+this._y*this._y+this._z*this._z+this._w*this._w},length:function(){return Math.sqrt(this._x*this._x+this._y*this._y+this._z*this._z+this._w*this._w)},normalize:function(){var a=this.length();0===a?(this._z=this._y=this._x=0,this._w=1):(a=1/a,this._x*=a,this._y*=a,this._z*=a,this._w*=a);this.onChangeCallback();return this}, +multiply:function(a,b){return void 0!==b?(console.warn("THREE.Quaternion: .multiply() now only accepts one argument. 
Use .multiplyQuaternions( a, b ) instead."),this.multiplyQuaternions(a,b)):this.multiplyQuaternions(this,a)},multiplyQuaternions:function(a,b){var c=a._x,d=a._y,e=a._z,f=a._w,g=b._x,h=b._y,k=b._z,n=b._w;this._x=c*n+f*g+d*k-e*h;this._y=d*n+f*h+e*g-c*k;this._z=e*n+f*k+c*h-d*g;this._w=f*n-c*g-d*h-e*k;this.onChangeCallback();return this},multiplyVector3:function(a){console.warn("THREE.Quaternion: .multiplyVector3() has been removed. Use is now vector.applyQuaternion( quaternion ) instead."); +return a.applyQuaternion(this)},slerp:function(a,b){if(0===b)return this;if(1===b)return this.copy(a);var c=this._x,d=this._y,e=this._z,f=this._w,g=f*a._w+c*a._x+d*a._y+e*a._z;0>g?(this._w=-a._w,this._x=-a._x,this._y=-a._y,this._z=-a._z,g=-g):this.copy(a);if(1<=g)return this._w=f,this._x=c,this._y=d,this._z=e,this;var h=Math.acos(g),k=Math.sqrt(1-g*g);if(.001>Math.abs(k))return this._w=.5*(f+this._w),this._x=.5*(c+this._x),this._y=.5*(d+this._y),this._z=.5*(e+this._z),this;g=Math.sin((1-b)*h)/k;h= +Math.sin(b*h)/k;this._w=f*g+this._w*h;this._x=c*g+this._x*h;this._y=d*g+this._y*h;this._z=e*g+this._z*h;this.onChangeCallback();return this},equals:function(a){return a._x===this._x&&a._y===this._y&&a._z===this._z&&a._w===this._w},fromArray:function(a,b){void 0===b&&(b=0);this._x=a[b];this._y=a[b+1];this._z=a[b+2];this._w=a[b+3];this.onChangeCallback();return this},toArray:function(a,b){void 0===a&&(a=[]);void 0===b&&(b=0);a[b]=this._x;a[b+1]=this._y;a[b+2]=this._z;a[b+3]=this._w;return a},onChange:function(a){this.onChangeCallback= +a;return this},onChangeCallback:function(){},clone:function(){return new THREE.Quaternion(this._x,this._y,this._z,this._w)}};THREE.Quaternion.slerp=function(a,b,c,d){return c.copy(a).slerp(b,d)};THREE.Vector2=function(a,b){this.x=a||0;this.y=b||0}; +THREE.Vector2.prototype={constructor:THREE.Vector2,set:function(a,b){this.x=a;this.y=b;return this},setX:function(a){this.x=a;return this},setY:function(a){this.y=a;return 
this},setComponent:function(a,b){switch(a){case 0:this.x=b;break;case 1:this.y=b;break;default:throw Error("index is out of range: "+a);}},getComponent:function(a){switch(a){case 0:return this.x;case 1:return this.y;default:throw Error("index is out of range: "+a);}},copy:function(a){this.x=a.x;this.y=a.y;return this},add:function(a, +b){if(void 0!==b)return console.warn("THREE.Vector2: .add() now only accepts one argument. Use .addVectors( a, b ) instead."),this.addVectors(a,b);this.x+=a.x;this.y+=a.y;return this},addVectors:function(a,b){this.x=a.x+b.x;this.y=a.y+b.y;return this},addScalar:function(a){this.x+=a;this.y+=a;return this},sub:function(a,b){if(void 0!==b)return console.warn("THREE.Vector2: .sub() now only accepts one argument. Use .subVectors( a, b ) instead."),this.subVectors(a,b);this.x-=a.x;this.y-=a.y;return this}, +subVectors:function(a,b){this.x=a.x-b.x;this.y=a.y-b.y;return this},multiply:function(a){this.x*=a.x;this.y*=a.y;return this},multiplyScalar:function(a){this.x*=a;this.y*=a;return this},divide:function(a){this.x/=a.x;this.y/=a.y;return this},divideScalar:function(a){0!==a?(a=1/a,this.x*=a,this.y*=a):this.y=this.x=0;return this},min:function(a){this.x>a.x&&(this.x=a.x);this.y>a.y&&(this.y=a.y);return this},max:function(a){this.xb.x&&(this.x=b.x);this.yb.y&&(this.y=b.y);return this},clampScalar:function(){var a,b;return function(c,d){void 0===a&&(a=new THREE.Vector2,b=new THREE.Vector2);a.set(c,c);b.set(d,d);return this.clamp(a,b)}}(),floor:function(){this.x=Math.floor(this.x);this.y=Math.floor(this.y);return this},ceil:function(){this.x=Math.ceil(this.x);this.y=Math.ceil(this.y);return this},round:function(){this.x=Math.round(this.x);this.y=Math.round(this.y);return this}, +roundToZero:function(){this.x=0>this.x?Math.ceil(this.x):Math.floor(this.x);this.y=0>this.y?Math.ceil(this.y):Math.floor(this.y);return this},negate:function(){this.x=-this.x;this.y=-this.y;return this},dot:function(a){return 
this.x*a.x+this.y*a.y},lengthSq:function(){return this.x*this.x+this.y*this.y},length:function(){return Math.sqrt(this.x*this.x+this.y*this.y)},normalize:function(){return this.divideScalar(this.length())},distanceTo:function(a){return Math.sqrt(this.distanceToSquared(a))},distanceToSquared:function(a){var b= +this.x-a.x;a=this.y-a.y;return b*b+a*a},setLength:function(a){var b=this.length();0!==b&&a!==b&&this.multiplyScalar(a/b);return this},lerp:function(a,b){this.x+=(a.x-this.x)*b;this.y+=(a.y-this.y)*b;return this},equals:function(a){return a.x===this.x&&a.y===this.y},fromArray:function(a,b){void 0===b&&(b=0);this.x=a[b];this.y=a[b+1];return this},toArray:function(a,b){void 0===a&&(a=[]);void 0===b&&(b=0);a[b]=this.x;a[b+1]=this.y;return a},clone:function(){return new THREE.Vector2(this.x,this.y)}}; +THREE.Vector3=function(a,b,c){this.x=a||0;this.y=b||0;this.z=c||0}; +THREE.Vector3.prototype={constructor:THREE.Vector3,set:function(a,b,c){this.x=a;this.y=b;this.z=c;return this},setX:function(a){this.x=a;return this},setY:function(a){this.y=a;return this},setZ:function(a){this.z=a;return this},setComponent:function(a,b){switch(a){case 0:this.x=b;break;case 1:this.y=b;break;case 2:this.z=b;break;default:throw Error("index is out of range: "+a);}},getComponent:function(a){switch(a){case 0:return this.x;case 1:return this.y;case 2:return this.z;default:throw Error("index is out of range: "+ +a);}},copy:function(a){this.x=a.x;this.y=a.y;this.z=a.z;return this},add:function(a,b){if(void 0!==b)return console.warn("THREE.Vector3: .add() now only accepts one argument. Use .addVectors( a, b ) instead."),this.addVectors(a,b);this.x+=a.x;this.y+=a.y;this.z+=a.z;return this},addScalar:function(a){this.x+=a;this.y+=a;this.z+=a;return this},addVectors:function(a,b){this.x=a.x+b.x;this.y=a.y+b.y;this.z=a.z+b.z;return this},sub:function(a,b){if(void 0!==b)return console.warn("THREE.Vector3: .sub() now only accepts one argument. 
Use .subVectors( a, b ) instead."), +this.subVectors(a,b);this.x-=a.x;this.y-=a.y;this.z-=a.z;return this},subVectors:function(a,b){this.x=a.x-b.x;this.y=a.y-b.y;this.z=a.z-b.z;return this},multiply:function(a,b){if(void 0!==b)return console.warn("THREE.Vector3: .multiply() now only accepts one argument. Use .multiplyVectors( a, b ) instead."),this.multiplyVectors(a,b);this.x*=a.x;this.y*=a.y;this.z*=a.z;return this},multiplyScalar:function(a){this.x*=a;this.y*=a;this.z*=a;return this},multiplyVectors:function(a,b){this.x=a.x*b.x;this.y= +a.y*b.y;this.z=a.z*b.z;return this},applyEuler:function(){var a;return function(b){!1===b instanceof THREE.Euler&&console.error("THREE.Vector3: .applyEuler() now expects a Euler rotation rather than a Vector3 and order.");void 0===a&&(a=new THREE.Quaternion);this.applyQuaternion(a.setFromEuler(b));return this}}(),applyAxisAngle:function(){var a;return function(b,c){void 0===a&&(a=new THREE.Quaternion);this.applyQuaternion(a.setFromAxisAngle(b,c));return this}}(),applyMatrix3:function(a){var b=this.x, +c=this.y,d=this.z;a=a.elements;this.x=a[0]*b+a[3]*c+a[6]*d;this.y=a[1]*b+a[4]*c+a[7]*d;this.z=a[2]*b+a[5]*c+a[8]*d;return this},applyMatrix4:function(a){var b=this.x,c=this.y,d=this.z;a=a.elements;this.x=a[0]*b+a[4]*c+a[8]*d+a[12];this.y=a[1]*b+a[5]*c+a[9]*d+a[13];this.z=a[2]*b+a[6]*c+a[10]*d+a[14];return this},applyProjection:function(a){var b=this.x,c=this.y,d=this.z;a=a.elements;var e=1/(a[3]*b+a[7]*c+a[11]*d+a[15]);this.x=(a[0]*b+a[4]*c+a[8]*d+a[12])*e;this.y=(a[1]*b+a[5]*c+a[9]*d+a[13])*e;this.z= +(a[2]*b+a[6]*c+a[10]*d+a[14])*e;return this},applyQuaternion:function(a){var b=this.x,c=this.y,d=this.z,e=a.x,f=a.y,g=a.z;a=a.w;var h=a*b+f*d-g*c,k=a*c+g*b-e*d,n=a*d+e*c-f*b,b=-e*b-f*c-g*d;this.x=h*a+b*-e+k*-g-n*-f;this.y=k*a+b*-f+n*-e-h*-g;this.z=n*a+b*-g+h*-f-k*-e;return this},project:function(){var a;return function(b){void 0===a&&(a=new THREE.Matrix4);a.multiplyMatrices(b.projectionMatrix,a.getInverse(b.matrixWorld));return 
this.applyProjection(a)}}(),unproject:function(){var a;return function(b){void 0=== +a&&(a=new THREE.Matrix4);a.multiplyMatrices(b.matrixWorld,a.getInverse(b.projectionMatrix));return this.applyProjection(a)}}(),transformDirection:function(a){var b=this.x,c=this.y,d=this.z;a=a.elements;this.x=a[0]*b+a[4]*c+a[8]*d;this.y=a[1]*b+a[5]*c+a[9]*d;this.z=a[2]*b+a[6]*c+a[10]*d;this.normalize();return this},divide:function(a){this.x/=a.x;this.y/=a.y;this.z/=a.z;return this},divideScalar:function(a){0!==a?(a=1/a,this.x*=a,this.y*=a,this.z*=a):this.z=this.y=this.x=0;return this},min:function(a){this.x> +a.x&&(this.x=a.x);this.y>a.y&&(this.y=a.y);this.z>a.z&&(this.z=a.z);return this},max:function(a){this.xb.x&&(this.x=b.x);this.yb.y&&(this.y=b.y);this.zb.z&&(this.z=b.z);return this},clampScalar:function(){var a,b;return function(c,d){void 0===a&&(a=new THREE.Vector3,b=new THREE.Vector3);a.set(c,c,c);b.set(d,d,d);return this.clamp(a, +b)}}(),floor:function(){this.x=Math.floor(this.x);this.y=Math.floor(this.y);this.z=Math.floor(this.z);return this},ceil:function(){this.x=Math.ceil(this.x);this.y=Math.ceil(this.y);this.z=Math.ceil(this.z);return this},round:function(){this.x=Math.round(this.x);this.y=Math.round(this.y);this.z=Math.round(this.z);return this},roundToZero:function(){this.x=0>this.x?Math.ceil(this.x):Math.floor(this.x);this.y=0>this.y?Math.ceil(this.y):Math.floor(this.y);this.z=0>this.z?Math.ceil(this.z):Math.floor(this.z); +return this},negate:function(){this.x=-this.x;this.y=-this.y;this.z=-this.z;return this},dot:function(a){return this.x*a.x+this.y*a.y+this.z*a.z},lengthSq:function(){return this.x*this.x+this.y*this.y+this.z*this.z},length:function(){return Math.sqrt(this.x*this.x+this.y*this.y+this.z*this.z)},lengthManhattan:function(){return Math.abs(this.x)+Math.abs(this.y)+Math.abs(this.z)},normalize:function(){return this.divideScalar(this.length())},setLength:function(a){var b=this.length();0!==b&&a!==b&&this.multiplyScalar(a/ +b);return 
this},lerp:function(a,b){this.x+=(a.x-this.x)*b;this.y+=(a.y-this.y)*b;this.z+=(a.z-this.z)*b;return this},cross:function(a,b){if(void 0!==b)return console.warn("THREE.Vector3: .cross() now only accepts one argument. Use .crossVectors( a, b ) instead."),this.crossVectors(a,b);var c=this.x,d=this.y,e=this.z;this.x=d*a.z-e*a.y;this.y=e*a.x-c*a.z;this.z=c*a.y-d*a.x;return this},crossVectors:function(a,b){var c=a.x,d=a.y,e=a.z,f=b.x,g=b.y,h=b.z;this.x=d*h-e*g;this.y=e*f-c*h;this.z=c*g-d*f;return this}, +projectOnVector:function(){var a,b;return function(c){void 0===a&&(a=new THREE.Vector3);a.copy(c).normalize();b=this.dot(a);return this.copy(a).multiplyScalar(b)}}(),projectOnPlane:function(){var a;return function(b){void 0===a&&(a=new THREE.Vector3);a.copy(this).projectOnVector(b);return this.sub(a)}}(),reflect:function(){var a;return function(b){void 0===a&&(a=new THREE.Vector3);return this.sub(a.copy(b).multiplyScalar(2*this.dot(b)))}}(),angleTo:function(a){a=this.dot(a)/(this.length()*a.length()); +return Math.acos(THREE.Math.clamp(a,-1,1))},distanceTo:function(a){return Math.sqrt(this.distanceToSquared(a))},distanceToSquared:function(a){var b=this.x-a.x,c=this.y-a.y;a=this.z-a.z;return b*b+c*c+a*a},setEulerFromRotationMatrix:function(a,b){console.error("THREE.Vector3: .setEulerFromRotationMatrix() has been removed. Use Euler.setFromRotationMatrix() instead.")},setEulerFromQuaternion:function(a,b){console.error("THREE.Vector3: .setEulerFromQuaternion() has been removed. 
Use Euler.setFromQuaternion() instead.")}, +getPositionFromMatrix:function(a){console.warn("THREE.Vector3: .getPositionFromMatrix() has been renamed to .setFromMatrixPosition().");return this.setFromMatrixPosition(a)},getScaleFromMatrix:function(a){console.warn("THREE.Vector3: .getScaleFromMatrix() has been renamed to .setFromMatrixScale().");return this.setFromMatrixScale(a)},getColumnFromMatrix:function(a,b){console.warn("THREE.Vector3: .getColumnFromMatrix() has been renamed to .setFromMatrixColumn().");return this.setFromMatrixColumn(a, +b)},setFromMatrixPosition:function(a){this.x=a.elements[12];this.y=a.elements[13];this.z=a.elements[14];return this},setFromMatrixScale:function(a){var b=this.set(a.elements[0],a.elements[1],a.elements[2]).length(),c=this.set(a.elements[4],a.elements[5],a.elements[6]).length();a=this.set(a.elements[8],a.elements[9],a.elements[10]).length();this.x=b;this.y=c;this.z=a;return this},setFromMatrixColumn:function(a,b){var c=4*a,d=b.elements;this.x=d[c];this.y=d[c+1];this.z=d[c+2];return this},equals:function(a){return a.x=== +this.x&&a.y===this.y&&a.z===this.z},fromArray:function(a,b){void 0===b&&(b=0);this.x=a[b];this.y=a[b+1];this.z=a[b+2];return this},toArray:function(a,b){void 0===a&&(a=[]);void 0===b&&(b=0);a[b]=this.x;a[b+1]=this.y;a[b+2]=this.z;return a},clone:function(){return new THREE.Vector3(this.x,this.y,this.z)}};THREE.Vector4=function(a,b,c,d){this.x=a||0;this.y=b||0;this.z=c||0;this.w=void 0!==d?d:1}; +THREE.Vector4.prototype={constructor:THREE.Vector4,set:function(a,b,c,d){this.x=a;this.y=b;this.z=c;this.w=d;return this},setX:function(a){this.x=a;return this},setY:function(a){this.y=a;return this},setZ:function(a){this.z=a;return this},setW:function(a){this.w=a;return this},setComponent:function(a,b){switch(a){case 0:this.x=b;break;case 1:this.y=b;break;case 2:this.z=b;break;case 3:this.w=b;break;default:throw Error("index is out of range: "+a);}},getComponent:function(a){switch(a){case 0:return this.x; +case 1:return 
this.y;case 2:return this.z;case 3:return this.w;default:throw Error("index is out of range: "+a);}},copy:function(a){this.x=a.x;this.y=a.y;this.z=a.z;this.w=void 0!==a.w?a.w:1;return this},add:function(a,b){if(void 0!==b)return console.warn("THREE.Vector4: .add() now only accepts one argument. Use .addVectors( a, b ) instead."),this.addVectors(a,b);this.x+=a.x;this.y+=a.y;this.z+=a.z;this.w+=a.w;return this},addScalar:function(a){this.x+=a;this.y+=a;this.z+=a;this.w+=a;return this}, +addVectors:function(a,b){this.x=a.x+b.x;this.y=a.y+b.y;this.z=a.z+b.z;this.w=a.w+b.w;return this},sub:function(a,b){if(void 0!==b)return console.warn("THREE.Vector4: .sub() now only accepts one argument. Use .subVectors( a, b ) instead."),this.subVectors(a,b);this.x-=a.x;this.y-=a.y;this.z-=a.z;this.w-=a.w;return this},subVectors:function(a,b){this.x=a.x-b.x;this.y=a.y-b.y;this.z=a.z-b.z;this.w=a.w-b.w;return this},multiplyScalar:function(a){this.x*=a;this.y*=a;this.z*=a;this.w*=a;return this},applyMatrix4:function(a){var b= +this.x,c=this.y,d=this.z,e=this.w;a=a.elements;this.x=a[0]*b+a[4]*c+a[8]*d+a[12]*e;this.y=a[1]*b+a[5]*c+a[9]*d+a[13]*e;this.z=a[2]*b+a[6]*c+a[10]*d+a[14]*e;this.w=a[3]*b+a[7]*c+a[11]*d+a[15]*e;return this},divideScalar:function(a){0!==a?(a=1/a,this.x*=a,this.y*=a,this.z*=a,this.w*=a):(this.z=this.y=this.x=0,this.w=1);return this},setAxisAngleFromQuaternion:function(a){this.w=2*Math.acos(a.w);var b=Math.sqrt(1-a.w*a.w);1E-4>b?(this.x=1,this.z=this.y=0):(this.x=a.x/b,this.y=a.y/b,this.z=a.z/b);return this}, +setAxisAngleFromRotationMatrix:function(a){var b,c,d;a=a.elements;var e=a[0];d=a[4];var f=a[8],g=a[1],h=a[5],k=a[9];c=a[2];b=a[6];var n=a[10];if(.01>Math.abs(d-g)&&.01>Math.abs(f-c)&&.01>Math.abs(k-b)){if(.1>Math.abs(d+g)&&.1>Math.abs(f+c)&&.1>Math.abs(k+b)&&.1>Math.abs(e+h+n-3))return 
this.set(1,0,0,0),this;a=Math.PI;e=(e+1)/2;h=(h+1)/2;n=(n+1)/2;d=(d+g)/4;f=(f+c)/4;k=(k+b)/4;e>h&&e>n?.01>e?(b=0,d=c=.707106781):(b=Math.sqrt(e),c=d/b,d=f/b):h>n?.01>h?(b=.707106781,c=0,d=.707106781):(c=Math.sqrt(h), +b=d/c,d=k/c):.01>n?(c=b=.707106781,d=0):(d=Math.sqrt(n),b=f/d,c=k/d);this.set(b,c,d,a);return this}a=Math.sqrt((b-k)*(b-k)+(f-c)*(f-c)+(g-d)*(g-d));.001>Math.abs(a)&&(a=1);this.x=(b-k)/a;this.y=(f-c)/a;this.z=(g-d)/a;this.w=Math.acos((e+h+n-1)/2);return this},min:function(a){this.x>a.x&&(this.x=a.x);this.y>a.y&&(this.y=a.y);this.z>a.z&&(this.z=a.z);this.w>a.w&&(this.w=a.w);return this},max:function(a){this.xb.x&&(this.x=b.x);this.yb.y&&(this.y=b.y);this.zb.z&&(this.z=b.z);this.wb.w&&(this.w=b.w);return this},clampScalar:function(){var a,b;return function(c,d){void 0===a&&(a=new THREE.Vector4,b=new THREE.Vector4);a.set(c,c,c,c);b.set(d,d,d,d);return this.clamp(a,b)}}(),floor:function(){this.x=Math.floor(this.x);this.y=Math.floor(this.y);this.z=Math.floor(this.z);this.w=Math.floor(this.w); +return this},ceil:function(){this.x=Math.ceil(this.x);this.y=Math.ceil(this.y);this.z=Math.ceil(this.z);this.w=Math.ceil(this.w);return this},round:function(){this.x=Math.round(this.x);this.y=Math.round(this.y);this.z=Math.round(this.z);this.w=Math.round(this.w);return this},roundToZero:function(){this.x=0>this.x?Math.ceil(this.x):Math.floor(this.x);this.y=0>this.y?Math.ceil(this.y):Math.floor(this.y);this.z=0>this.z?Math.ceil(this.z):Math.floor(this.z);this.w=0>this.w?Math.ceil(this.w):Math.floor(this.w); +return this},negate:function(){this.x=-this.x;this.y=-this.y;this.z=-this.z;this.w=-this.w;return this},dot:function(a){return this.x*a.x+this.y*a.y+this.z*a.z+this.w*a.w},lengthSq:function(){return this.x*this.x+this.y*this.y+this.z*this.z+this.w*this.w},length:function(){return Math.sqrt(this.x*this.x+this.y*this.y+this.z*this.z+this.w*this.w)},lengthManhattan:function(){return 
Math.abs(this.x)+Math.abs(this.y)+Math.abs(this.z)+Math.abs(this.w)},normalize:function(){return this.divideScalar(this.length())}, +setLength:function(a){var b=this.length();0!==b&&a!==b&&this.multiplyScalar(a/b);return this},lerp:function(a,b){this.x+=(a.x-this.x)*b;this.y+=(a.y-this.y)*b;this.z+=(a.z-this.z)*b;this.w+=(a.w-this.w)*b;return this},equals:function(a){return a.x===this.x&&a.y===this.y&&a.z===this.z&&a.w===this.w},fromArray:function(a,b){void 0===b&&(b=0);this.x=a[b];this.y=a[b+1];this.z=a[b+2];this.w=a[b+3];return this},toArray:function(a,b){void 0===a&&(a=[]);void 0===b&&(b=0);a[b]=this.x;a[b+1]=this.y;a[b+2]= +this.z;a[b+3]=this.w;return a},clone:function(){return new THREE.Vector4(this.x,this.y,this.z,this.w)}};THREE.Euler=function(a,b,c,d){this._x=a||0;this._y=b||0;this._z=c||0;this._order=d||THREE.Euler.DefaultOrder};THREE.Euler.RotationOrders="XYZ YZX ZXY XZY YXZ ZYX".split(" ");THREE.Euler.DefaultOrder="XYZ"; +THREE.Euler.prototype={constructor:THREE.Euler,_x:0,_y:0,_z:0,_order:THREE.Euler.DefaultOrder,get x(){return this._x},set x(a){this._x=a;this.onChangeCallback()},get y(){return this._y},set y(a){this._y=a;this.onChangeCallback()},get z(){return this._z},set z(a){this._z=a;this.onChangeCallback()},get order(){return this._order},set order(a){this._order=a;this.onChangeCallback()},set:function(a,b,c,d){this._x=a;this._y=b;this._z=c;this._order=d||this._order;this.onChangeCallback();return this},copy:function(a){this._x= +a._x;this._y=a._y;this._z=a._z;this._order=a._order;this.onChangeCallback();return this},setFromRotationMatrix:function(a,b){var c=THREE.Math.clamp,d=a.elements,e=d[0],f=d[4],g=d[8],h=d[1],k=d[5],n=d[9],p=d[2],q=d[6],d=d[10];b=b||this._order;"XYZ"===b?(this._y=Math.asin(c(g,-1,1)),.99999>Math.abs(g)?(this._x=Math.atan2(-n,d),this._z=Math.atan2(-f,e)):(this._x=Math.atan2(q,k),this._z=0)):"YXZ"===b?(this._x=Math.asin(-c(n,-1,1)),.99999>Math.abs(n)?(this._y=Math.atan2(g,d),this._z=Math.atan2(h,k)):(this._y= 
+Math.atan2(-p,e),this._z=0)):"ZXY"===b?(this._x=Math.asin(c(q,-1,1)),.99999>Math.abs(q)?(this._y=Math.atan2(-p,d),this._z=Math.atan2(-f,k)):(this._y=0,this._z=Math.atan2(h,e))):"ZYX"===b?(this._y=Math.asin(-c(p,-1,1)),.99999>Math.abs(p)?(this._x=Math.atan2(q,d),this._z=Math.atan2(h,e)):(this._x=0,this._z=Math.atan2(-f,k))):"YZX"===b?(this._z=Math.asin(c(h,-1,1)),.99999>Math.abs(h)?(this._x=Math.atan2(-n,k),this._y=Math.atan2(-p,e)):(this._x=0,this._y=Math.atan2(g,d))):"XZY"===b?(this._z=Math.asin(-c(f, +-1,1)),.99999>Math.abs(f)?(this._x=Math.atan2(q,k),this._y=Math.atan2(g,e)):(this._x=Math.atan2(-n,d),this._y=0)):console.warn("THREE.Euler: .setFromRotationMatrix() given unsupported order: "+b);this._order=b;this.onChangeCallback();return this},setFromQuaternion:function(a,b,c){var d=THREE.Math.clamp,e=a.x*a.x,f=a.y*a.y,g=a.z*a.z,h=a.w*a.w;b=b||this._order;"XYZ"===b?(this._x=Math.atan2(2*(a.x*a.w-a.y*a.z),h-e-f+g),this._y=Math.asin(d(2*(a.x*a.z+a.y*a.w),-1,1)),this._z=Math.atan2(2*(a.z*a.w-a.x* +a.y),h+e-f-g)):"YXZ"===b?(this._x=Math.asin(d(2*(a.x*a.w-a.y*a.z),-1,1)),this._y=Math.atan2(2*(a.x*a.z+a.y*a.w),h-e-f+g),this._z=Math.atan2(2*(a.x*a.y+a.z*a.w),h-e+f-g)):"ZXY"===b?(this._x=Math.asin(d(2*(a.x*a.w+a.y*a.z),-1,1)),this._y=Math.atan2(2*(a.y*a.w-a.z*a.x),h-e-f+g),this._z=Math.atan2(2*(a.z*a.w-a.x*a.y),h-e+f-g)):"ZYX"===b?(this._x=Math.atan2(2*(a.x*a.w+a.z*a.y),h-e-f+g),this._y=Math.asin(d(2*(a.y*a.w-a.x*a.z),-1,1)),this._z=Math.atan2(2*(a.x*a.y+a.z*a.w),h+e-f-g)):"YZX"===b?(this._x=Math.atan2(2* +(a.x*a.w-a.z*a.y),h-e+f-g),this._y=Math.atan2(2*(a.y*a.w-a.x*a.z),h+e-f-g),this._z=Math.asin(d(2*(a.x*a.y+a.z*a.w),-1,1))):"XZY"===b?(this._x=Math.atan2(2*(a.x*a.w+a.y*a.z),h-e+f-g),this._y=Math.atan2(2*(a.x*a.z+a.y*a.w),h+e-f-g),this._z=Math.asin(d(2*(a.z*a.w-a.x*a.y),-1,1))):console.warn("THREE.Euler: .setFromQuaternion() given unsupported order: "+b);this._order=b;if(!1!==c)this.onChangeCallback();return this},reorder:function(){var a=new THREE.Quaternion;return 
function(b){a.setFromEuler(this); +this.setFromQuaternion(a,b)}}(),equals:function(a){return a._x===this._x&&a._y===this._y&&a._z===this._z&&a._order===this._order},fromArray:function(a){this._x=a[0];this._y=a[1];this._z=a[2];void 0!==a[3]&&(this._order=a[3]);this.onChangeCallback();return this},toArray:function(){return[this._x,this._y,this._z,this._order]},onChange:function(a){this.onChangeCallback=a;return this},onChangeCallback:function(){},clone:function(){return new THREE.Euler(this._x,this._y,this._z,this._order)}}; +THREE.Line3=function(a,b){this.start=void 0!==a?a:new THREE.Vector3;this.end=void 0!==b?b:new THREE.Vector3}; +THREE.Line3.prototype={constructor:THREE.Line3,set:function(a,b){this.start.copy(a);this.end.copy(b);return this},copy:function(a){this.start.copy(a.start);this.end.copy(a.end);return this},center:function(a){return(a||new THREE.Vector3).addVectors(this.start,this.end).multiplyScalar(.5)},delta:function(a){return(a||new THREE.Vector3).subVectors(this.end,this.start)},distanceSq:function(){return this.start.distanceToSquared(this.end)},distance:function(){return this.start.distanceTo(this.end)},at:function(a, +b){var c=b||new THREE.Vector3;return this.delta(c).multiplyScalar(a).add(this.start)},closestPointToPointParameter:function(){var a=new THREE.Vector3,b=new THREE.Vector3;return function(c,d){a.subVectors(c,this.start);b.subVectors(this.end,this.start);var e=b.dot(b),e=b.dot(a)/e;d&&(e=THREE.Math.clamp(e,0,1));return e}}(),closestPointToPoint:function(a,b,c){a=this.closestPointToPointParameter(a,b);c=c||new THREE.Vector3;return this.delta(c).multiplyScalar(a).add(this.start)},applyMatrix4:function(a){this.start.applyMatrix4(a); +this.end.applyMatrix4(a);return this},equals:function(a){return a.start.equals(this.start)&&a.end.equals(this.end)},clone:function(){return(new THREE.Line3).copy(this)}};THREE.Box2=function(a,b){this.min=void 0!==a?a:new THREE.Vector2(Infinity,Infinity);this.max=void 0!==b?b:new 
THREE.Vector2(-Infinity,-Infinity)}; +THREE.Box2.prototype={constructor:THREE.Box2,set:function(a,b){this.min.copy(a);this.max.copy(b);return this},setFromPoints:function(a){this.makeEmpty();for(var b=0,c=a.length;bthis.max.x||a.ythis.max.y?!1:!0},containsBox:function(a){return this.min.x<=a.min.x&&a.max.x<=this.max.x&&this.min.y<=a.min.y&&a.max.y<=this.max.y?!0:!1},getParameter:function(a,b){return(b||new THREE.Vector2).set((a.x-this.min.x)/(this.max.x-this.min.x),(a.y-this.min.y)/(this.max.y-this.min.y))},isIntersectionBox:function(a){return a.max.xthis.max.x||a.max.y +this.max.y?!1:!0},clampPoint:function(a,b){return(b||new THREE.Vector2).copy(a).clamp(this.min,this.max)},distanceToPoint:function(){var a=new THREE.Vector2;return function(b){return a.copy(b).clamp(this.min,this.max).sub(b).length()}}(),intersect:function(a){this.min.max(a.min);this.max.min(a.max);return this},union:function(a){this.min.min(a.min);this.max.max(a.max);return this},translate:function(a){this.min.add(a);this.max.add(a);return this},equals:function(a){return a.min.equals(this.min)&& +a.max.equals(this.max)},clone:function(){return(new THREE.Box2).copy(this)}};THREE.Box3=function(a,b){this.min=void 0!==a?a:new THREE.Vector3(Infinity,Infinity,Infinity);this.max=void 0!==b?b:new THREE.Vector3(-Infinity,-Infinity,-Infinity)}; +THREE.Box3.prototype={constructor:THREE.Box3,set:function(a,b){this.min.copy(a);this.max.copy(b);return this},setFromPoints:function(a){this.makeEmpty();for(var b=0,c=a.length;bthis.max.x||a.ythis.max.y||a.zthis.max.z?!1:!0},containsBox:function(a){return this.min.x<=a.min.x&&a.max.x<=this.max.x&&this.min.y<=a.min.y&&a.max.y<=this.max.y&&this.min.z<=a.min.z&&a.max.z<=this.max.z?!0:!1},getParameter:function(a,b){return(b||new THREE.Vector3).set((a.x-this.min.x)/(this.max.x- +this.min.x),(a.y-this.min.y)/(this.max.y-this.min.y),(a.z-this.min.z)/(this.max.z-this.min.z))},isIntersectionBox:function(a){return 
a.max.xthis.max.x||a.max.ythis.max.y||a.max.zthis.max.z?!1:!0},clampPoint:function(a,b){return(b||new THREE.Vector3).copy(a).clamp(this.min,this.max)},distanceToPoint:function(){var a=new THREE.Vector3;return function(b){return a.copy(b).clamp(this.min,this.max).sub(b).length()}}(),getBoundingSphere:function(){var a= +new THREE.Vector3;return function(b){b=b||new THREE.Sphere;b.center=this.center();b.radius=.5*this.size(a).length();return b}}(),intersect:function(a){this.min.max(a.min);this.max.min(a.max);return this},union:function(a){this.min.min(a.min);this.max.max(a.max);return this},applyMatrix4:function(){var a=[new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3];return function(b){a[0].set(this.min.x,this.min.y, +this.min.z).applyMatrix4(b);a[1].set(this.min.x,this.min.y,this.max.z).applyMatrix4(b);a[2].set(this.min.x,this.max.y,this.min.z).applyMatrix4(b);a[3].set(this.min.x,this.max.y,this.max.z).applyMatrix4(b);a[4].set(this.max.x,this.min.y,this.min.z).applyMatrix4(b);a[5].set(this.max.x,this.min.y,this.max.z).applyMatrix4(b);a[6].set(this.max.x,this.max.y,this.min.z).applyMatrix4(b);a[7].set(this.max.x,this.max.y,this.max.z).applyMatrix4(b);this.makeEmpty();this.setFromPoints(a);return this}}(),translate:function(a){this.min.add(a); +this.max.add(a);return this},equals:function(a){return a.min.equals(this.min)&&a.max.equals(this.max)},clone:function(){return(new THREE.Box3).copy(this)}};THREE.Matrix3=function(){this.elements=new Float32Array([1,0,0,0,1,0,0,0,1]);0this.determinant()&&(g=-g);c.x=f[12];c.y=f[13];c.z=f[14];b.elements.set(this.elements);c=1/g;var f=1/h,n=1/k;b.elements[0]*=c;b.elements[1]*= +c;b.elements[2]*=c;b.elements[4]*=f;b.elements[5]*=f;b.elements[6]*=f;b.elements[8]*=n;b.elements[9]*=n;b.elements[10]*=n;d.setFromRotationMatrix(b);e.x=g;e.y=h;e.z=k;return this}}(),makeFrustum:function(a,b,c,d,e,f){var 
g=this.elements;g[0]=2*e/(b-a);g[4]=0;g[8]=(b+a)/(b-a);g[12]=0;g[1]=0;g[5]=2*e/(d-c);g[9]=(d+c)/(d-c);g[13]=0;g[2]=0;g[6]=0;g[10]=-(f+e)/(f-e);g[14]=-2*f*e/(f-e);g[3]=0;g[7]=0;g[11]=-1;g[15]=0;return this},makePerspective:function(a,b,c,d){a=c*Math.tan(THREE.Math.degToRad(.5*a)); +var e=-a;return this.makeFrustum(e*b,a*b,e,a,c,d)},makeOrthographic:function(a,b,c,d,e,f){var g=this.elements,h=b-a,k=c-d,n=f-e;g[0]=2/h;g[4]=0;g[8]=0;g[12]=-((b+a)/h);g[1]=0;g[5]=2/k;g[9]=0;g[13]=-((c+d)/k);g[2]=0;g[6]=0;g[10]=-2/n;g[14]=-((f+e)/n);g[3]=0;g[7]=0;g[11]=0;g[15]=1;return this},fromArray:function(a){this.elements.set(a);return this},toArray:function(){var a=this.elements;return[a[0],a[1],a[2],a[3],a[4],a[5],a[6],a[7],a[8],a[9],a[10],a[11],a[12],a[13],a[14],a[15]]},clone:function(){return(new THREE.Matrix4).fromArray(this.elements)}}; +THREE.Ray=function(a,b){this.origin=void 0!==a?a:new THREE.Vector3;this.direction=void 0!==b?b:new THREE.Vector3}; +THREE.Ray.prototype={constructor:THREE.Ray,set:function(a,b){this.origin.copy(a);this.direction.copy(b);return this},copy:function(a){this.origin.copy(a.origin);this.direction.copy(a.direction);return this},at:function(a,b){return(b||new THREE.Vector3).copy(this.direction).multiplyScalar(a).add(this.origin)},recast:function(){var a=new THREE.Vector3;return function(b){this.origin.copy(this.at(b,a));return this}}(),closestPointToPoint:function(a,b){var c=b||new THREE.Vector3;c.subVectors(a,this.origin); +var d=c.dot(this.direction);return 0>d?c.copy(this.origin):c.copy(this.direction).multiplyScalar(d).add(this.origin)},distanceToPoint:function(){var a=new THREE.Vector3;return function(b){var c=a.subVectors(b,this.origin).dot(this.direction);if(0>c)return this.origin.distanceTo(b);a.copy(this.direction).multiplyScalar(c).add(this.origin);return a.distanceTo(b)}}(),distanceSqToSegment:function(a,b,c,d){var e=a.clone().add(b).multiplyScalar(.5),f=b.clone().sub(a).normalize(),g=.5*a.distanceTo(b),h= 
+this.origin.clone().sub(e);a=-this.direction.dot(f);b=h.dot(this.direction);var k=-h.dot(f),n=h.lengthSq(),p=Math.abs(1-a*a),q,m;0<=p?(h=a*k-b,q=a*b-k,m=g*p,0<=h?q>=-m?q<=m?(g=1/p,h*=g,q*=g,a=h*(h+a*q+2*b)+q*(a*h+q+2*k)+n):(q=g,h=Math.max(0,-(a*q+b)),a=-h*h+q*(q+2*k)+n):(q=-g,h=Math.max(0,-(a*q+b)),a=-h*h+q*(q+2*k)+n):q<=-m?(h=Math.max(0,-(-a*g+b)),q=0f)return null;f=Math.sqrt(f-e);e=d-f; +d+=f;return 0>e&&0>d?null:0>e?this.at(d,c):this.at(e,c)}}(),isIntersectionPlane:function(a){var b=a.distanceToPoint(this.origin);return 0===b||0>a.normal.dot(this.direction)*b?!0:!1},distanceToPlane:function(a){var b=a.normal.dot(this.direction);if(0==b)return 0==a.distanceToPoint(this.origin)?0:null;a=-(this.origin.dot(a.normal)+a.constant)/b;return 0<=a?a:null},intersectPlane:function(a,b){var c=this.distanceToPlane(a);return null===c?null:this.at(c,b)},isIntersectionBox:function(){var a=new THREE.Vector3; +return function(b){return null!==this.intersectBox(b,a)}}(),intersectBox:function(a,b){var c,d,e,f,g;d=1/this.direction.x;f=1/this.direction.y;g=1/this.direction.z;var h=this.origin;0<=d?(c=(a.min.x-h.x)*d,d*=a.max.x-h.x):(c=(a.max.x-h.x)*d,d*=a.min.x-h.x);0<=f?(e=(a.min.y-h.y)*f,f*=a.max.y-h.y):(e=(a.max.y-h.y)*f,f*=a.min.y-h.y);if(c>f||e>d)return null;if(e>c||c!==c)c=e;if(fg||e>d)return null;if(e>c||c!== +c)c=e;if(gd?null:this.at(0<=c?c:d,b)},intersectTriangle:function(){var a=new THREE.Vector3,b=new THREE.Vector3,c=new THREE.Vector3,d=new THREE.Vector3;return function(e,f,g,h,k){b.subVectors(f,e);c.subVectors(g,e);d.crossVectors(b,c);f=this.direction.dot(d);if(0f)h=-1,f=-f;else return null;a.subVectors(this.origin,e);e=h*this.direction.dot(c.crossVectors(a,c));if(0>e)return null;g=h*this.direction.dot(b.cross(a));if(0>g||e+g>f)return null; +e=-h*a.dot(d);return 0>e?null:this.at(e/f,k)}}(),applyMatrix4:function(a){this.direction.add(this.origin).applyMatrix4(a);this.origin.applyMatrix4(a);this.direction.sub(this.origin);this.direction.normalize();return 
this},equals:function(a){return a.origin.equals(this.origin)&&a.direction.equals(this.direction)},clone:function(){return(new THREE.Ray).copy(this)}};THREE.Sphere=function(a,b){this.center=void 0!==a?a:new THREE.Vector3;this.radius=void 0!==b?b:0}; +THREE.Sphere.prototype={constructor:THREE.Sphere,set:function(a,b){this.center.copy(a);this.radius=b;return this},setFromPoints:function(){var a=new THREE.Box3;return function(b,c){var d=this.center;void 0!==c?d.copy(c):a.setFromPoints(b).center(d);for(var e=0,f=0,g=b.length;f=this.radius},containsPoint:function(a){return a.distanceToSquared(this.center)<= +this.radius*this.radius},distanceToPoint:function(a){return a.distanceTo(this.center)-this.radius},intersectsSphere:function(a){var b=this.radius+a.radius;return a.center.distanceToSquared(this.center)<=b*b},clampPoint:function(a,b){var c=this.center.distanceToSquared(a),d=b||new THREE.Vector3;d.copy(a);c>this.radius*this.radius&&(d.sub(this.center).normalize(),d.multiplyScalar(this.radius).add(this.center));return d},getBoundingBox:function(a){a=a||new THREE.Box3;a.set(this.center,this.center);a.expandByScalar(this.radius); +return a},applyMatrix4:function(a){this.center.applyMatrix4(a);this.radius*=a.getMaxScaleOnAxis();return this},translate:function(a){this.center.add(a);return this},equals:function(a){return a.center.equals(this.center)&&a.radius===this.radius},clone:function(){return(new THREE.Sphere).copy(this)}}; +THREE.Frustum=function(a,b,c,d,e,f){this.planes=[void 0!==a?a:new THREE.Plane,void 0!==b?b:new THREE.Plane,void 0!==c?c:new THREE.Plane,void 0!==d?d:new THREE.Plane,void 0!==e?e:new THREE.Plane,void 0!==f?f:new THREE.Plane]}; +THREE.Frustum.prototype={constructor:THREE.Frustum,set:function(a,b,c,d,e,f){var g=this.planes;g[0].copy(a);g[1].copy(b);g[2].copy(c);g[3].copy(d);g[4].copy(e);g[5].copy(f);return this},copy:function(a){for(var b=this.planes,c=0;6>c;c++)b[c].copy(a.planes[c]);return this},setFromMatrix:function(a){var 
b=this.planes,c=a.elements;a=c[0];var d=c[1],e=c[2],f=c[3],g=c[4],h=c[5],k=c[6],n=c[7],p=c[8],q=c[9],m=c[10],r=c[11],t=c[12],s=c[13],u=c[14],c=c[15];b[0].setComponents(f-a,n-g,r-p,c-t).normalize();b[1].setComponents(f+ +a,n+g,r+p,c+t).normalize();b[2].setComponents(f+d,n+h,r+q,c+s).normalize();b[3].setComponents(f-d,n-h,r-q,c-s).normalize();b[4].setComponents(f-e,n-k,r-m,c-u).normalize();b[5].setComponents(f+e,n+k,r+m,c+u).normalize();return this},intersectsObject:function(){var a=new THREE.Sphere;return function(b){var c=b.geometry;null===c.boundingSphere&&c.computeBoundingSphere();a.copy(c.boundingSphere);a.applyMatrix4(b.matrixWorld);return this.intersectsSphere(a)}}(),intersectsSphere:function(a){var b=this.planes, +c=a.center;a=-a.radius;for(var d=0;6>d;d++)if(b[d].distanceToPoint(c)e;e++){var f=d[e];a.x=0g&&0>f)return!1}return!0}}(), +containsPoint:function(a){for(var b=this.planes,c=0;6>c;c++)if(0>b[c].distanceToPoint(a))return!1;return!0},clone:function(){return(new THREE.Frustum).copy(this)}};THREE.Plane=function(a,b){this.normal=void 0!==a?a:new THREE.Vector3(1,0,0);this.constant=void 0!==b?b:0}; +THREE.Plane.prototype={constructor:THREE.Plane,set:function(a,b){this.normal.copy(a);this.constant=b;return this},setComponents:function(a,b,c,d){this.normal.set(a,b,c);this.constant=d;return this},setFromNormalAndCoplanarPoint:function(a,b){this.normal.copy(a);this.constant=-b.dot(this.normal);return this},setFromCoplanarPoints:function(){var a=new THREE.Vector3,b=new THREE.Vector3;return function(c,d,e){d=a.subVectors(e,d).cross(b.subVectors(c,d)).normalize();this.setFromNormalAndCoplanarPoint(d, +c);return this}}(),copy:function(a){this.normal.copy(a.normal);this.constant=a.constant;return this},normalize:function(){var a=1/this.normal.length();this.normal.multiplyScalar(a);this.constant*=a;return this},negate:function(){this.constant*=-1;this.normal.negate();return this},distanceToPoint:function(a){return 
this.normal.dot(a)+this.constant},distanceToSphere:function(a){return this.distanceToPoint(a.center)-a.radius},projectPoint:function(a,b){return this.orthoPoint(a,b).sub(a).negate()},orthoPoint:function(a, +b){var c=this.distanceToPoint(a);return(b||new THREE.Vector3).copy(this.normal).multiplyScalar(c)},isIntersectionLine:function(a){var b=this.distanceToPoint(a.start);a=this.distanceToPoint(a.end);return 0>b&&0a&&0f||1e;e++)8==e||13==e||18==e||23==e?b[e]="-":14==e?b[e]="4":(2>=c&&(c=33554432+16777216*Math.random()|0),d=c&15,c>>=4,b[e]=a[19==e?d&3|8:d]);return b.join("")}}(),clamp:function(a,b,c){return ac?c:a},clampBottom:function(a,b){return a=c)return 1;a=(a-b)/(c-b);return a*a*(3-2*a)},smootherstep:function(a,b,c){if(a<=b)return 0;if(a>=c)return 1;a=(a-b)/(c-b);return a*a*a*(a*(6*a-15)+10)},random16:function(){return(65280*Math.random()+255*Math.random())/65535},randInt:function(a,b){return a+Math.floor(Math.random()*(b-a+1))},randFloat:function(a,b){return a+Math.random()*(b-a)},randFloatSpread:function(a){return a*(.5-Math.random())},degToRad:function(){var a=Math.PI/180;return function(b){return b*a}}(),radToDeg:function(){var a= +180/Math.PI;return function(b){return b*a}}(),isPowerOfTwo:function(a){return 0===(a&a-1)&&0!==a}}; +THREE.Spline=function(a){function b(a,b,c,d,e,f,g){a=.5*(c-a);d=.5*(d-b);return(2*(b-c)+a+d)*g+(-3*(b-c)-2*a-d)*f+a*e+b}this.points=a;var c=[],d={x:0,y:0,z:0},e,f,g,h,k,n,p,q,m;this.initFromArray=function(a){this.points=[];for(var b=0;bthis.points.length-2?this.points.length-1:f+1;c[3]=f>this.points.length-3?this.points.length-1:f+ +2;n=this.points[c[0]];p=this.points[c[1]];q=this.points[c[2]];m=this.points[c[3]];h=g*g;k=g*h;d.x=b(n.x,p.x,q.x,m.x,g,h,k);d.y=b(n.y,p.y,q.y,m.y,g,h,k);d.z=b(n.z,p.z,q.z,m.z,g,h,k);return d};this.getControlPointsArray=function(){var a,b,c=this.points.length,d=[];for(a=0;a=b.x+b.y}}(); 
+THREE.Triangle.prototype={constructor:THREE.Triangle,set:function(a,b,c){this.a.copy(a);this.b.copy(b);this.c.copy(c);return this},setFromPointsAndIndices:function(a,b,c,d){this.a.copy(a[b]);this.b.copy(a[c]);this.c.copy(a[d]);return this},copy:function(a){this.a.copy(a.a);this.b.copy(a.b);this.c.copy(a.c);return this},area:function(){var a=new THREE.Vector3,b=new THREE.Vector3;return function(){a.subVectors(this.c,this.b);b.subVectors(this.a,this.b);return.5*a.cross(b).length()}}(),midpoint:function(a){return(a|| +new THREE.Vector3).addVectors(this.a,this.b).add(this.c).multiplyScalar(1/3)},normal:function(a){return THREE.Triangle.normal(this.a,this.b,this.c,a)},plane:function(a){return(a||new THREE.Plane).setFromCoplanarPoints(this.a,this.b,this.c)},barycoordFromPoint:function(a,b){return THREE.Triangle.barycoordFromPoint(a,this.a,this.b,this.c,b)},containsPoint:function(a){return THREE.Triangle.containsPoint(a,this.a,this.b,this.c)},equals:function(a){return a.a.equals(this.a)&&a.b.equals(this.b)&&a.c.equals(this.c)}, +clone:function(){return(new THREE.Triangle).copy(this)}};THREE.Clock=function(a){this.autoStart=void 0!==a?a:!0;this.elapsedTime=this.oldTime=this.startTime=0;this.running=!1}; +THREE.Clock.prototype={constructor:THREE.Clock,start:function(){this.oldTime=this.startTime=void 0!==self.performance&&void 0!==self.performance.now?self.performance.now():Date.now();this.running=!0},stop:function(){this.getElapsedTime();this.running=!1},getElapsedTime:function(){this.getDelta();return this.elapsedTime},getDelta:function(){var a=0;this.autoStart&&!this.running&&this.start();if(this.running){var b=void 0!==self.performance&&void 0!==self.performance.now?self.performance.now():Date.now(), +a=.001*(b-this.oldTime);this.oldTime=b;this.elapsedTime+=a}return a}};THREE.EventDispatcher=function(){}; 
+THREE.EventDispatcher.prototype={constructor:THREE.EventDispatcher,apply:function(a){a.addEventListener=THREE.EventDispatcher.prototype.addEventListener;a.hasEventListener=THREE.EventDispatcher.prototype.hasEventListener;a.removeEventListener=THREE.EventDispatcher.prototype.removeEventListener;a.dispatchEvent=THREE.EventDispatcher.prototype.dispatchEvent},addEventListener:function(a,b){void 0===this._listeners&&(this._listeners={});var c=this._listeners;void 0===c[a]&&(c[a]=[]);-1===c[a].indexOf(b)&& +c[a].push(b)},hasEventListener:function(a,b){if(void 0===this._listeners)return!1;var c=this._listeners;return void 0!==c[a]&&-1!==c[a].indexOf(b)?!0:!1},removeEventListener:function(a,b){if(void 0!==this._listeners){var c=this._listeners[a];if(void 0!==c){var d=c.indexOf(b);-1!==d&&c.splice(d,1)}}},dispatchEvent:function(a){if(void 0!==this._listeners){var b=this._listeners[a.type];if(void 0!==b){a.target=this;for(var c=[],d=b.length,e=0;eza?-1:1;h[4*a]=la.x;h[4*a+1]=la.y;h[4*a+2]=la.z;h[4*a+3]=Ga}if(void 0===this.attributes.index||void 0===this.attributes.position||void 0===this.attributes.normal||void 0===this.attributes.uv)console.warn("Missing required attributes (index, position, normal or uv) in BufferGeometry.computeTangents()");else{var c=this.attributes.index.array,d=this.attributes.position.array, +e=this.attributes.normal.array,f=this.attributes.uv.array,g=d.length/3;void 0===this.attributes.tangent&&this.addAttribute("tangent",new THREE.BufferAttribute(new Float32Array(4*g),4));for(var h=this.attributes.tangent.array,k=[],n=[],p=0;ps;s++)t=a[3*c+s],-1==m[t]?(q[2*s]=t,q[2*s+1]=-1,p++):m[t]k.index+b)for(k={start:f,count:0,index:g},h.push(k),p=0;6>p;p+=2)s=q[p+1],-1p;p+=2)t=q[p],s=q[p+1],-1===s&&(s=g++),m[t]=s,r[s]=t,e[f++]=s-k.index,k.count++}this.reorderBuffers(e,r,g);return this.offsets=h},merge:function(){console.log("BufferGeometry.merge(): TODO")},normalizeNormals:function(){for(var 
a=this.attributes.normal.array,b,c,d,e=0,f=a.length;ed?-1:1,e.vertexTangents[c]=new THREE.Vector4(w.x,w.y,w.z,d);this.hasTangents=!0},computeLineDistances:function(){for(var a=0,b=this.vertices,c=0,d=b.length;cd;d++)if(e[d]==e[(d+1)%3]){a.push(f);break}for(f=a.length-1;0<=f;f--)for(e=a[f],this.faces.splice(e,1),c=0,g=this.faceVertexUvs.length;ca.opacity)h.transparent=a.transparent;void 0!==a.depthTest&&(h.depthTest=a.depthTest);void 0!==a.depthWrite&&(h.depthWrite=a.depthWrite);void 0!==a.visible&&(h.visible=a.visible);void 0!==a.flipSided&&(h.side=THREE.BackSide);void 0!==a.doubleSided&&(h.side=THREE.DoubleSide);void 0!==a.wireframe&&(h.wireframe=a.wireframe);void 0!==a.vertexColors&&("face"=== +a.vertexColors?h.vertexColors=THREE.FaceColors:a.vertexColors&&(h.vertexColors=THREE.VertexColors));a.colorDiffuse?h.color=e(a.colorDiffuse):a.DbgColor&&(h.color=a.DbgColor);a.colorSpecular&&(h.specular=e(a.colorSpecular));a.colorAmbient&&(h.ambient=e(a.colorAmbient));a.colorEmissive&&(h.emissive=e(a.colorEmissive));a.transparency&&(h.opacity=a.transparency);a.specularCoef&&(h.shininess=a.specularCoef);a.mapDiffuse&&b&&d(h,"map",a.mapDiffuse,a.mapDiffuseRepeat,a.mapDiffuseOffset,a.mapDiffuseWrap, +a.mapDiffuseAnisotropy);a.mapLight&&b&&d(h,"lightMap",a.mapLight,a.mapLightRepeat,a.mapLightOffset,a.mapLightWrap,a.mapLightAnisotropy);a.mapBump&&b&&d(h,"bumpMap",a.mapBump,a.mapBumpRepeat,a.mapBumpOffset,a.mapBumpWrap,a.mapBumpAnisotropy);a.mapNormal&&b&&d(h,"normalMap",a.mapNormal,a.mapNormalRepeat,a.mapNormalOffset,a.mapNormalWrap,a.mapNormalAnisotropy);a.mapSpecular&&b&&d(h,"specularMap",a.mapSpecular,a.mapSpecularRepeat,a.mapSpecularOffset,a.mapSpecularWrap,a.mapSpecularAnisotropy);a.mapAlpha&& 
+b&&d(h,"alphaMap",a.mapAlpha,a.mapAlphaRepeat,a.mapAlphaOffset,a.mapAlphaWrap,a.mapAlphaAnisotropy);a.mapBumpScale&&(h.bumpScale=a.mapBumpScale);a.mapNormal?(g=THREE.ShaderLib.normalmap,k=THREE.UniformsUtils.clone(g.uniforms),k.tNormal.value=h.normalMap,a.mapNormalFactor&&k.uNormalScale.value.set(a.mapNormalFactor,a.mapNormalFactor),h.map&&(k.tDiffuse.value=h.map,k.enableDiffuse.value=!0),h.specularMap&&(k.tSpecular.value=h.specularMap,k.enableSpecular.value=!0),h.lightMap&&(k.tAO.value=h.lightMap, +k.enableAO.value=!0),k.diffuse.value.setHex(h.color),k.specular.value.setHex(h.specular),k.ambient.value.setHex(h.ambient),k.shininess.value=h.shininess,void 0!==h.opacity&&(k.opacity.value=h.opacity),g=new THREE.ShaderMaterial({fragmentShader:g.fragmentShader,vertexShader:g.vertexShader,uniforms:k,lights:!0,fog:!0}),h.transparent&&(g.transparent=!0)):g=new THREE[g](h);void 0!==a.DbgName&&(g.name=a.DbgName);return g}}; +THREE.Loader.Handlers={handlers:[],add:function(a,b){this.handlers.push(a,b)},get:function(a){for(var b=0,c=this.handlers.length;bg;g++)m=y[k++],v=u[2*m],m=u[2*m+1],v=new THREE.Vector2(v,m),2!==g&&c.faceVertexUvs[d][h].push(v),0!==g&&c.faceVertexUvs[d][h+1].push(v);q&&(q=3*y[k++],r.normal.set(G[q++],G[q++],G[q]),s.normal.copy(r.normal));if(t)for(d=0;4>d;d++)q=3*y[k++],t=new THREE.Vector3(G[q++], +G[q++],G[q]),2!==d&&r.vertexNormals.push(t),0!==d&&s.vertexNormals.push(t);p&&(p=y[k++],p=w[p],r.color.setHex(p),s.color.setHex(p));if(b)for(d=0;4>d;d++)p=y[k++],p=w[p],2!==d&&r.vertexColors.push(new THREE.Color(p)),0!==d&&s.vertexColors.push(new THREE.Color(p));c.faces.push(r);c.faces.push(s)}else{r=new THREE.Face3;r.a=y[k++];r.b=y[k++];r.c=y[k++];h&&(h=y[k++],r.materialIndex=h);h=c.faces.length;if(d)for(d=0;dg;g++)m=y[k++],v=u[2*m],m=u[2*m+1], +v=new THREE.Vector2(v,m),c.faceVertexUvs[d][h].push(v);q&&(q=3*y[k++],r.normal.set(G[q++],G[q++],G[q]));if(t)for(d=0;3>d;d++)q=3*y[k++],t=new 
THREE.Vector3(G[q++],G[q++],G[q]),r.vertexNormals.push(t);p&&(p=y[k++],r.color.setHex(w[p]));if(b)for(d=0;3>d;d++)p=y[k++],r.vertexColors.push(new THREE.Color(w[p]));c.faces.push(r)}})(d);(function(){var b=void 0!==a.influencesPerVertex?a.influencesPerVertex:2;if(a.skinWeights)for(var d=0,g=a.skinWeights.length;dthis.opacity&&(a.opacity=this.opacity);!1!==this.transparent&&(a.transparent=this.transparent);!1!==this.wireframe&&(a.wireframe=this.wireframe);return a},clone:function(a){void 0===a&&(a=new THREE.Material);a.name=this.name;a.side=this.side;a.opacity=this.opacity;a.transparent=this.transparent;a.blending=this.blending;a.blendSrc=this.blendSrc;a.blendDst=this.blendDst;a.blendEquation=this.blendEquation;a.depthTest=this.depthTest;a.depthWrite=this.depthWrite;a.polygonOffset=this.polygonOffset;a.polygonOffsetFactor= +this.polygonOffsetFactor;a.polygonOffsetUnits=this.polygonOffsetUnits;a.alphaTest=this.alphaTest;a.overdraw=this.overdraw;a.visible=this.visible;return a},dispose:function(){this.dispatchEvent({type:"dispose"})}};THREE.EventDispatcher.prototype.apply(THREE.Material.prototype);THREE.MaterialIdCount=0; +THREE.LineBasicMaterial=function(a){THREE.Material.call(this);this.type="LineBasicMaterial";this.color=new THREE.Color(16777215);this.linewidth=1;this.linejoin=this.linecap="round";this.vertexColors=THREE.NoColors;this.fog=!0;this.setValues(a)};THREE.LineBasicMaterial.prototype=Object.create(THREE.Material.prototype); +THREE.LineBasicMaterial.prototype.clone=function(){var a=new THREE.LineBasicMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.linewidth=this.linewidth;a.linecap=this.linecap;a.linejoin=this.linejoin;a.vertexColors=this.vertexColors;a.fog=this.fog;return a}; +THREE.LineDashedMaterial=function(a){THREE.Material.call(this);this.type="LineDashedMaterial";this.color=new 
THREE.Color(16777215);this.scale=this.linewidth=1;this.dashSize=3;this.gapSize=1;this.vertexColors=!1;this.fog=!0;this.setValues(a)};THREE.LineDashedMaterial.prototype=Object.create(THREE.Material.prototype); +THREE.LineDashedMaterial.prototype.clone=function(){var a=new THREE.LineDashedMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.linewidth=this.linewidth;a.scale=this.scale;a.dashSize=this.dashSize;a.gapSize=this.gapSize;a.vertexColors=this.vertexColors;a.fog=this.fog;return a}; +THREE.MeshBasicMaterial=function(a){THREE.Material.call(this);this.type="MeshBasicMaterial";this.color=new THREE.Color(16777215);this.envMap=this.alphaMap=this.specularMap=this.lightMap=this.map=null;this.combine=THREE.MultiplyOperation;this.reflectivity=1;this.refractionRatio=.98;this.fog=!0;this.shading=THREE.SmoothShading;this.wireframe=!1;this.wireframeLinewidth=1;this.wireframeLinejoin=this.wireframeLinecap="round";this.vertexColors=THREE.NoColors;this.morphTargets=this.skinning=!1;this.setValues(a)}; +THREE.MeshBasicMaterial.prototype=Object.create(THREE.Material.prototype); +THREE.MeshBasicMaterial.prototype.clone=function(){var a=new THREE.MeshBasicMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.map=this.map;a.lightMap=this.lightMap;a.specularMap=this.specularMap;a.alphaMap=this.alphaMap;a.envMap=this.envMap;a.combine=this.combine;a.reflectivity=this.reflectivity;a.refractionRatio=this.refractionRatio;a.fog=this.fog;a.shading=this.shading;a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;a.wireframeLinecap=this.wireframeLinecap; +a.wireframeLinejoin=this.wireframeLinejoin;a.vertexColors=this.vertexColors;a.skinning=this.skinning;a.morphTargets=this.morphTargets;return a}; +THREE.MeshLambertMaterial=function(a){THREE.Material.call(this);this.type="MeshLambertMaterial";this.color=new THREE.Color(16777215);this.ambient=new THREE.Color(16777215);this.emissive=new 
THREE.Color(0);this.wrapAround=!1;this.wrapRGB=new THREE.Vector3(1,1,1);this.envMap=this.alphaMap=this.specularMap=this.lightMap=this.map=null;this.combine=THREE.MultiplyOperation;this.reflectivity=1;this.refractionRatio=.98;this.fog=!0;this.shading=THREE.SmoothShading;this.wireframe=!1;this.wireframeLinewidth= +1;this.wireframeLinejoin=this.wireframeLinecap="round";this.vertexColors=THREE.NoColors;this.morphNormals=this.morphTargets=this.skinning=!1;this.setValues(a)};THREE.MeshLambertMaterial.prototype=Object.create(THREE.Material.prototype); +THREE.MeshLambertMaterial.prototype.clone=function(){var a=new THREE.MeshLambertMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.ambient.copy(this.ambient);a.emissive.copy(this.emissive);a.wrapAround=this.wrapAround;a.wrapRGB.copy(this.wrapRGB);a.map=this.map;a.lightMap=this.lightMap;a.specularMap=this.specularMap;a.alphaMap=this.alphaMap;a.envMap=this.envMap;a.combine=this.combine;a.reflectivity=this.reflectivity;a.refractionRatio=this.refractionRatio;a.fog=this.fog; +a.shading=this.shading;a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;a.wireframeLinecap=this.wireframeLinecap;a.wireframeLinejoin=this.wireframeLinejoin;a.vertexColors=this.vertexColors;a.skinning=this.skinning;a.morphTargets=this.morphTargets;a.morphNormals=this.morphNormals;return a}; +THREE.MeshPhongMaterial=function(a){THREE.Material.call(this);this.type="MeshPhongMaterial";this.color=new THREE.Color(16777215);this.ambient=new THREE.Color(16777215);this.emissive=new THREE.Color(0);this.specular=new THREE.Color(1118481);this.shininess=30;this.wrapAround=this.metal=!1;this.wrapRGB=new THREE.Vector3(1,1,1);this.bumpMap=this.lightMap=this.map=null;this.bumpScale=1;this.normalMap=null;this.normalScale=new THREE.Vector2(1,1);this.envMap=this.alphaMap=this.specularMap=null;this.combine= 
+THREE.MultiplyOperation;this.reflectivity=1;this.refractionRatio=.98;this.fog=!0;this.shading=THREE.SmoothShading;this.wireframe=!1;this.wireframeLinewidth=1;this.wireframeLinejoin=this.wireframeLinecap="round";this.vertexColors=THREE.NoColors;this.morphNormals=this.morphTargets=this.skinning=!1;this.setValues(a)};THREE.MeshPhongMaterial.prototype=Object.create(THREE.Material.prototype); +THREE.MeshPhongMaterial.prototype.clone=function(){var a=new THREE.MeshPhongMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.ambient.copy(this.ambient);a.emissive.copy(this.emissive);a.specular.copy(this.specular);a.shininess=this.shininess;a.metal=this.metal;a.wrapAround=this.wrapAround;a.wrapRGB.copy(this.wrapRGB);a.map=this.map;a.lightMap=this.lightMap;a.bumpMap=this.bumpMap;a.bumpScale=this.bumpScale;a.normalMap=this.normalMap;a.normalScale.copy(this.normalScale); +a.specularMap=this.specularMap;a.alphaMap=this.alphaMap;a.envMap=this.envMap;a.combine=this.combine;a.reflectivity=this.reflectivity;a.refractionRatio=this.refractionRatio;a.fog=this.fog;a.shading=this.shading;a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;a.wireframeLinecap=this.wireframeLinecap;a.wireframeLinejoin=this.wireframeLinejoin;a.vertexColors=this.vertexColors;a.skinning=this.skinning;a.morphTargets=this.morphTargets;a.morphNormals=this.morphNormals;return a}; +THREE.MeshDepthMaterial=function(a){THREE.Material.call(this);this.type="MeshDepthMaterial";this.wireframe=this.morphTargets=!1;this.wireframeLinewidth=1;this.setValues(a)};THREE.MeshDepthMaterial.prototype=Object.create(THREE.Material.prototype);THREE.MeshDepthMaterial.prototype.clone=function(){var a=new THREE.MeshDepthMaterial;THREE.Material.prototype.clone.call(this,a);a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;return a}; 
+THREE.MeshNormalMaterial=function(a){THREE.Material.call(this,a);this.type="MeshNormalMaterial";this.shading=THREE.FlatShading;this.wireframe=!1;this.wireframeLinewidth=1;this.morphTargets=!1;this.setValues(a)};THREE.MeshNormalMaterial.prototype=Object.create(THREE.Material.prototype); +THREE.MeshNormalMaterial.prototype.clone=function(){var a=new THREE.MeshNormalMaterial;THREE.Material.prototype.clone.call(this,a);a.shading=this.shading;a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;return a};THREE.MeshFaceMaterial=function(a){this.uuid=THREE.Math.generateUUID();this.type="MeshFaceMaterial";this.materials=a instanceof Array?a:[]}; +THREE.MeshFaceMaterial.prototype={constructor:THREE.MeshFaceMaterial,toJSON:function(){for(var a={metadata:{version:4.2,type:"material",generator:"MaterialExporter"},uuid:this.uuid,type:this.type,materials:[]},b=0,c=this.materials.length;bf)){var m=b.origin.distanceTo(n);md.far||e.push({distance:m,point:k.clone().applyMatrix4(this.matrixWorld),face:null,faceIndex:null,object:this})}}}();THREE.Line.prototype.clone=function(a){void 0===a&&(a=new THREE.Line(this.geometry,this.material,this.mode));THREE.Object3D.prototype.clone.call(this,a);return a}; +THREE.Mesh=function(a,b){THREE.Object3D.call(this);this.type="Mesh";this.geometry=void 0!==a?a:new THREE.Geometry;this.material=void 0!==b?b:new THREE.MeshBasicMaterial({color:16777215*Math.random()});this.updateMorphTargets()};THREE.Mesh.prototype=Object.create(THREE.Object3D.prototype); +THREE.Mesh.prototype.updateMorphTargets=function(){if(void 0!==this.geometry.morphTargets&&0g.far||h.push({distance:x,point:K,face:new THREE.Face3(p,q,m,THREE.Triangle.normal(d,e,f)),faceIndex:null,object:this})}}}else for(s=p.position.array,t=k=0,w=s.length;k +g.far||h.push({distance:x,point:K,face:new THREE.Face3(p,q,m,THREE.Triangle.normal(d,e,f)),faceIndex:null,object:this}))}}else if(k instanceof THREE.Geometry)for(t=this.material instanceof 
THREE.MeshFaceMaterial,s=!0===t?this.material.materials:null,r=g.precision,u=k.vertices,v=0,y=k.faces.length;vg.far||h.push({distance:x,point:K,face:G,faceIndex:v,object:this}))}}}();THREE.Mesh.prototype.clone=function(a,b){void 0===a&&(a=new THREE.Mesh(this.geometry,this.material));THREE.Object3D.prototype.clone.call(this,a,b);return a};THREE.Bone=function(a){THREE.Object3D.call(this);this.skin=a};THREE.Bone.prototype=Object.create(THREE.Object3D.prototype); +THREE.Skeleton=function(a,b,c){this.useVertexTexture=void 0!==c?c:!0;this.identityMatrix=new THREE.Matrix4;a=a||[];this.bones=a.slice(0);this.useVertexTexture?(this.boneTextureHeight=this.boneTextureWidth=a=256h.end&&(h.end=e);b||(b=g)}}a.firstAnimation=b}; +THREE.MorphAnimMesh.prototype.setAnimationLabel=function(a,b,c){this.geometry.animations||(this.geometry.animations={});this.geometry.animations[a]={start:b,end:c}};THREE.MorphAnimMesh.prototype.playAnimation=function(a,b){var c=this.geometry.animations[a];c?(this.setFrameRange(c.start,c.end),this.duration=(c.end-c.start)/b*1E3,this.time=0):console.warn("animation["+a+"] undefined")}; +THREE.MorphAnimMesh.prototype.updateAnimation=function(a){var b=this.duration/this.length;this.time+=this.direction*a;if(this.mirroredLoop){if(this.time>this.duration||0>this.time)this.direction*=-1,this.time>this.duration&&(this.time=this.duration,this.directionBackwards=!0),0>this.time&&(this.time=0,this.directionBackwards=!1)}else this.time%=this.duration,0>this.time&&(this.time+=this.duration);a=this.startKeyframe+THREE.Math.clamp(Math.floor(this.time/b),0,this.length-1);a!==this.currentKeyframe&& +(this.morphTargetInfluences[this.lastKeyframe]=0,this.morphTargetInfluences[this.currentKeyframe]=1,this.morphTargetInfluences[a]=0,this.lastKeyframe=this.currentKeyframe,this.currentKeyframe=a);b=this.time%b/b;this.directionBackwards&&(b=1-b);this.morphTargetInfluences[this.currentKeyframe]=b;this.morphTargetInfluences[this.lastKeyframe]=1-b}; 
+THREE.MorphAnimMesh.prototype.interpolateTargets=function(a,b,c){for(var d=this.morphTargetInfluences,e=0,f=d.length;e=this.objects[d].distance)this.objects[d-1].object.visible=!1,this.objects[d].object.visible=!0;else break;for(;dthis.scale.x||c.push({distance:d,point:this.position,face:null,object:this})}}();THREE.Sprite.prototype.clone=function(a){void 0===a&&(a=new THREE.Sprite(this.material));THREE.Object3D.prototype.clone.call(this,a);return a};THREE.Particle=THREE.Sprite; +THREE.LensFlare=function(a,b,c,d,e){THREE.Object3D.call(this);this.lensFlares=[];this.positionScreen=new THREE.Vector3;this.customUpdateCallback=void 0;void 0!==a&&this.add(a,b,c,d,e)};THREE.LensFlare.prototype=Object.create(THREE.Object3D.prototype); +THREE.LensFlare.prototype.add=function(a,b,c,d,e,f){void 0===b&&(b=-1);void 0===c&&(c=0);void 0===f&&(f=1);void 0===e&&(e=new THREE.Color(16777215));void 0===d&&(d=THREE.NormalBlending);c=Math.min(c,Math.max(0,c));this.lensFlares.push({texture:a,size:b,distance:c,x:0,y:0,z:0,scale:1,rotation:1,opacity:f,color:e,blending:d})}; +THREE.LensFlare.prototype.updateLensFlares=function(){var a,b=this.lensFlares.length,c,d=2*-this.positionScreen.x,e=2*-this.positionScreen.y;for(a=0;a dashSize ) {\n\t\tdiscard;\n\t}\n\tgl_FragColor = vec4( diffuse, opacity );",THREE.ShaderChunk.logdepthbuf_fragment,THREE.ShaderChunk.color_fragment,THREE.ShaderChunk.fog_fragment, +"}"].join("\n")},depth:{uniforms:{mNear:{type:"f",value:1},mFar:{type:"f",value:2E3},opacity:{type:"f",value:1}},vertexShader:[THREE.ShaderChunk.morphtarget_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {",THREE.ShaderChunk.morphtarget_vertex,THREE.ShaderChunk.default_vertex,THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:["uniform float mNear;\nuniform float mFar;\nuniform float opacity;",THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {",THREE.ShaderChunk.logdepthbuf_fragment, +"\t#ifdef USE_LOGDEPTHBUF_EXT\n\t\tfloat depth = 
gl_FragDepthEXT / gl_FragCoord.w;\n\t#else\n\t\tfloat depth = gl_FragCoord.z / gl_FragCoord.w;\n\t#endif\n\tfloat color = 1.0 - smoothstep( mNear, mFar, depth );\n\tgl_FragColor = vec4( vec3( color ), opacity );\n}"].join("\n")},normal:{uniforms:{opacity:{type:"f",value:1}},vertexShader:["varying vec3 vNormal;",THREE.ShaderChunk.morphtarget_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {\n\tvNormal = normalize( normalMatrix * normal );", +THREE.ShaderChunk.morphtarget_vertex,THREE.ShaderChunk.default_vertex,THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:["uniform float opacity;\nvarying vec3 vNormal;",THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {\n\tgl_FragColor = vec4( 0.5 * normalize( vNormal ) + 0.5, opacity );",THREE.ShaderChunk.logdepthbuf_fragment,"}"].join("\n")},normalmap:{uniforms:THREE.UniformsUtils.merge([THREE.UniformsLib.fog,THREE.UniformsLib.lights,THREE.UniformsLib.shadowmap,{enableAO:{type:"i", +value:0},enableDiffuse:{type:"i",value:0},enableSpecular:{type:"i",value:0},enableReflection:{type:"i",value:0},enableDisplacement:{type:"i",value:0},tDisplacement:{type:"t",value:null},tDiffuse:{type:"t",value:null},tCube:{type:"t",value:null},tNormal:{type:"t",value:null},tSpecular:{type:"t",value:null},tAO:{type:"t",value:null},uNormalScale:{type:"v2",value:new THREE.Vector2(1,1)},uDisplacementBias:{type:"f",value:0},uDisplacementScale:{type:"f",value:1},diffuse:{type:"c",value:new THREE.Color(16777215)}, +specular:{type:"c",value:new THREE.Color(1118481)},ambient:{type:"c",value:new THREE.Color(16777215)},shininess:{type:"f",value:30},opacity:{type:"f",value:1},useRefract:{type:"i",value:0},refractionRatio:{type:"f",value:.98},reflectivity:{type:"f",value:.5},uOffset:{type:"v2",value:new THREE.Vector2(0,0)},uRepeat:{type:"v2",value:new THREE.Vector2(1,1)},wrapRGB:{type:"v3",value:new THREE.Vector3(1,1,1)}}]),fragmentShader:["uniform vec3 ambient;\nuniform vec3 diffuse;\nuniform vec3 
specular;\nuniform float shininess;\nuniform float opacity;\nuniform bool enableDiffuse;\nuniform bool enableSpecular;\nuniform bool enableAO;\nuniform bool enableReflection;\nuniform sampler2D tDiffuse;\nuniform sampler2D tNormal;\nuniform sampler2D tSpecular;\nuniform sampler2D tAO;\nuniform samplerCube tCube;\nuniform vec2 uNormalScale;\nuniform bool useRefract;\nuniform float refractionRatio;\nuniform float reflectivity;\nvarying vec3 vTangent;\nvarying vec3 vBinormal;\nvarying vec3 vNormal;\nvarying vec2 vUv;\nuniform vec3 ambientLightColor;\n#if MAX_DIR_LIGHTS > 0\n\tuniform vec3 directionalLightColor[ MAX_DIR_LIGHTS ];\n\tuniform vec3 directionalLightDirection[ MAX_DIR_LIGHTS ];\n#endif\n#if MAX_HEMI_LIGHTS > 0\n\tuniform vec3 hemisphereLightSkyColor[ MAX_HEMI_LIGHTS ];\n\tuniform vec3 hemisphereLightGroundColor[ MAX_HEMI_LIGHTS ];\n\tuniform vec3 hemisphereLightDirection[ MAX_HEMI_LIGHTS ];\n#endif\n#if MAX_POINT_LIGHTS > 0\n\tuniform vec3 pointLightColor[ MAX_POINT_LIGHTS ];\n\tuniform vec3 pointLightPosition[ MAX_POINT_LIGHTS ];\n\tuniform float pointLightDistance[ MAX_POINT_LIGHTS ];\n#endif\n#if MAX_SPOT_LIGHTS > 0\n\tuniform vec3 spotLightColor[ MAX_SPOT_LIGHTS ];\n\tuniform vec3 spotLightPosition[ MAX_SPOT_LIGHTS ];\n\tuniform vec3 spotLightDirection[ MAX_SPOT_LIGHTS ];\n\tuniform float spotLightAngleCos[ MAX_SPOT_LIGHTS ];\n\tuniform float spotLightExponent[ MAX_SPOT_LIGHTS ];\n\tuniform float spotLightDistance[ MAX_SPOT_LIGHTS ];\n#endif\n#ifdef WRAP_AROUND\n\tuniform vec3 wrapRGB;\n#endif\nvarying vec3 vWorldPosition;\nvarying vec3 vViewPosition;", +THREE.ShaderChunk.shadowmap_pars_fragment,THREE.ShaderChunk.fog_pars_fragment,THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {",THREE.ShaderChunk.logdepthbuf_fragment,"\tgl_FragColor = vec4( vec3( 1.0 ), opacity );\n\tvec3 specularTex = vec3( 1.0 );\n\tvec3 normalTex = texture2D( tNormal, vUv ).xyz * 2.0 - 1.0;\n\tnormalTex.xy *= uNormalScale;\n\tnormalTex = normalize( normalTex );\n\tif( 
enableDiffuse ) {\n\t\t#ifdef GAMMA_INPUT\n\t\t\tvec4 texelColor = texture2D( tDiffuse, vUv );\n\t\t\ttexelColor.xyz *= texelColor.xyz;\n\t\t\tgl_FragColor = gl_FragColor * texelColor;\n\t\t#else\n\t\t\tgl_FragColor = gl_FragColor * texture2D( tDiffuse, vUv );\n\t\t#endif\n\t}\n\tif( enableAO ) {\n\t\t#ifdef GAMMA_INPUT\n\t\t\tvec4 aoColor = texture2D( tAO, vUv );\n\t\t\taoColor.xyz *= aoColor.xyz;\n\t\t\tgl_FragColor.xyz = gl_FragColor.xyz * aoColor.xyz;\n\t\t#else\n\t\t\tgl_FragColor.xyz = gl_FragColor.xyz * texture2D( tAO, vUv ).xyz;\n\t\t#endif\n\t}", +THREE.ShaderChunk.alphatest_fragment,"\tif( enableSpecular )\n\t\tspecularTex = texture2D( tSpecular, vUv ).xyz;\n\tmat3 tsb = mat3( normalize( vTangent ), normalize( vBinormal ), normalize( vNormal ) );\n\tvec3 finalNormal = tsb * normalTex;\n\t#ifdef FLIP_SIDED\n\t\tfinalNormal = -finalNormal;\n\t#endif\n\tvec3 normal = normalize( finalNormal );\n\tvec3 viewPosition = normalize( vViewPosition );\n\t#if MAX_POINT_LIGHTS > 0\n\t\tvec3 pointDiffuse = vec3( 0.0 );\n\t\tvec3 pointSpecular = vec3( 0.0 );\n\t\tfor ( int i = 0; i < MAX_POINT_LIGHTS; i ++ ) {\n\t\t\tvec4 lPosition = viewMatrix * vec4( pointLightPosition[ i ], 1.0 );\n\t\t\tvec3 pointVector = lPosition.xyz + vViewPosition.xyz;\n\t\t\tfloat pointDistance = 1.0;\n\t\t\tif ( pointLightDistance[ i ] > 0.0 )\n\t\t\t\tpointDistance = 1.0 - min( ( length( pointVector ) / pointLightDistance[ i ] ), 1.0 );\n\t\t\tpointVector = normalize( pointVector );\n\t\t\t#ifdef WRAP_AROUND\n\t\t\t\tfloat pointDiffuseWeightFull = max( dot( normal, pointVector ), 0.0 );\n\t\t\t\tfloat pointDiffuseWeightHalf = max( 0.5 * dot( normal, pointVector ) + 0.5, 0.0 );\n\t\t\t\tvec3 pointDiffuseWeight = mix( vec3( pointDiffuseWeightFull ), vec3( pointDiffuseWeightHalf ), wrapRGB );\n\t\t\t#else\n\t\t\t\tfloat pointDiffuseWeight = max( dot( normal, pointVector ), 0.0 );\n\t\t\t#endif\n\t\t\tpointDiffuse += pointDistance * pointLightColor[ i ] * diffuse * pointDiffuseWeight;\n\t\t\tvec3 
pointHalfVector = normalize( pointVector + viewPosition );\n\t\t\tfloat pointDotNormalHalf = max( dot( normal, pointHalfVector ), 0.0 );\n\t\t\tfloat pointSpecularWeight = specularTex.r * max( pow( pointDotNormalHalf, shininess ), 0.0 );\n\t\t\tfloat specularNormalization = ( shininess + 2.0 ) / 8.0;\n\t\t\tvec3 schlick = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( pointVector, pointHalfVector ), 0.0 ), 5.0 );\n\t\t\tpointSpecular += schlick * pointLightColor[ i ] * pointSpecularWeight * pointDiffuseWeight * pointDistance * specularNormalization;\n\t\t}\n\t#endif\n\t#if MAX_SPOT_LIGHTS > 0\n\t\tvec3 spotDiffuse = vec3( 0.0 );\n\t\tvec3 spotSpecular = vec3( 0.0 );\n\t\tfor ( int i = 0; i < MAX_SPOT_LIGHTS; i ++ ) {\n\t\t\tvec4 lPosition = viewMatrix * vec4( spotLightPosition[ i ], 1.0 );\n\t\t\tvec3 spotVector = lPosition.xyz + vViewPosition.xyz;\n\t\t\tfloat spotDistance = 1.0;\n\t\t\tif ( spotLightDistance[ i ] > 0.0 )\n\t\t\t\tspotDistance = 1.0 - min( ( length( spotVector ) / spotLightDistance[ i ] ), 1.0 );\n\t\t\tspotVector = normalize( spotVector );\n\t\t\tfloat spotEffect = dot( spotLightDirection[ i ], normalize( spotLightPosition[ i ] - vWorldPosition ) );\n\t\t\tif ( spotEffect > spotLightAngleCos[ i ] ) {\n\t\t\t\tspotEffect = max( pow( max( spotEffect, 0.0 ), spotLightExponent[ i ] ), 0.0 );\n\t\t\t\t#ifdef WRAP_AROUND\n\t\t\t\t\tfloat spotDiffuseWeightFull = max( dot( normal, spotVector ), 0.0 );\n\t\t\t\t\tfloat spotDiffuseWeightHalf = max( 0.5 * dot( normal, spotVector ) + 0.5, 0.0 );\n\t\t\t\t\tvec3 spotDiffuseWeight = mix( vec3( spotDiffuseWeightFull ), vec3( spotDiffuseWeightHalf ), wrapRGB );\n\t\t\t\t#else\n\t\t\t\t\tfloat spotDiffuseWeight = max( dot( normal, spotVector ), 0.0 );\n\t\t\t\t#endif\n\t\t\t\tspotDiffuse += spotDistance * spotLightColor[ i ] * diffuse * spotDiffuseWeight * spotEffect;\n\t\t\t\tvec3 spotHalfVector = normalize( spotVector + viewPosition );\n\t\t\t\tfloat spotDotNormalHalf = max( dot( normal, 
spotHalfVector ), 0.0 );\n\t\t\t\tfloat spotSpecularWeight = specularTex.r * max( pow( spotDotNormalHalf, shininess ), 0.0 );\n\t\t\t\tfloat specularNormalization = ( shininess + 2.0 ) / 8.0;\n\t\t\t\tvec3 schlick = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( spotVector, spotHalfVector ), 0.0 ), 5.0 );\n\t\t\t\tspotSpecular += schlick * spotLightColor[ i ] * spotSpecularWeight * spotDiffuseWeight * spotDistance * specularNormalization * spotEffect;\n\t\t\t}\n\t\t}\n\t#endif\n\t#if MAX_DIR_LIGHTS > 0\n\t\tvec3 dirDiffuse = vec3( 0.0 );\n\t\tvec3 dirSpecular = vec3( 0.0 );\n\t\tfor( int i = 0; i < MAX_DIR_LIGHTS; i++ ) {\n\t\t\tvec4 lDirection = viewMatrix * vec4( directionalLightDirection[ i ], 0.0 );\n\t\t\tvec3 dirVector = normalize( lDirection.xyz );\n\t\t\t#ifdef WRAP_AROUND\n\t\t\t\tfloat directionalLightWeightingFull = max( dot( normal, dirVector ), 0.0 );\n\t\t\t\tfloat directionalLightWeightingHalf = max( 0.5 * dot( normal, dirVector ) + 0.5, 0.0 );\n\t\t\t\tvec3 dirDiffuseWeight = mix( vec3( directionalLightWeightingFull ), vec3( directionalLightWeightingHalf ), wrapRGB );\n\t\t\t#else\n\t\t\t\tfloat dirDiffuseWeight = max( dot( normal, dirVector ), 0.0 );\n\t\t\t#endif\n\t\t\tdirDiffuse += directionalLightColor[ i ] * diffuse * dirDiffuseWeight;\n\t\t\tvec3 dirHalfVector = normalize( dirVector + viewPosition );\n\t\t\tfloat dirDotNormalHalf = max( dot( normal, dirHalfVector ), 0.0 );\n\t\t\tfloat dirSpecularWeight = specularTex.r * max( pow( dirDotNormalHalf, shininess ), 0.0 );\n\t\t\tfloat specularNormalization = ( shininess + 2.0 ) / 8.0;\n\t\t\tvec3 schlick = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( dirVector, dirHalfVector ), 0.0 ), 5.0 );\n\t\t\tdirSpecular += schlick * directionalLightColor[ i ] * dirSpecularWeight * dirDiffuseWeight * specularNormalization;\n\t\t}\n\t#endif\n\t#if MAX_HEMI_LIGHTS > 0\n\t\tvec3 hemiDiffuse = vec3( 0.0 );\n\t\tvec3 hemiSpecular = vec3( 0.0 );\n\t\tfor( int i = 0; i < MAX_HEMI_LIGHTS; i ++ 
) {\n\t\t\tvec4 lDirection = viewMatrix * vec4( hemisphereLightDirection[ i ], 0.0 );\n\t\t\tvec3 lVector = normalize( lDirection.xyz );\n\t\t\tfloat dotProduct = dot( normal, lVector );\n\t\t\tfloat hemiDiffuseWeight = 0.5 * dotProduct + 0.5;\n\t\t\tvec3 hemiColor = mix( hemisphereLightGroundColor[ i ], hemisphereLightSkyColor[ i ], hemiDiffuseWeight );\n\t\t\themiDiffuse += diffuse * hemiColor;\n\t\t\tvec3 hemiHalfVectorSky = normalize( lVector + viewPosition );\n\t\t\tfloat hemiDotNormalHalfSky = 0.5 * dot( normal, hemiHalfVectorSky ) + 0.5;\n\t\t\tfloat hemiSpecularWeightSky = specularTex.r * max( pow( max( hemiDotNormalHalfSky, 0.0 ), shininess ), 0.0 );\n\t\t\tvec3 lVectorGround = -lVector;\n\t\t\tvec3 hemiHalfVectorGround = normalize( lVectorGround + viewPosition );\n\t\t\tfloat hemiDotNormalHalfGround = 0.5 * dot( normal, hemiHalfVectorGround ) + 0.5;\n\t\t\tfloat hemiSpecularWeightGround = specularTex.r * max( pow( max( hemiDotNormalHalfGround, 0.0 ), shininess ), 0.0 );\n\t\t\tfloat dotProductGround = dot( normal, lVectorGround );\n\t\t\tfloat specularNormalization = ( shininess + 2.0 ) / 8.0;\n\t\t\tvec3 schlickSky = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( lVector, hemiHalfVectorSky ), 0.0 ), 5.0 );\n\t\t\tvec3 schlickGround = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( lVectorGround, hemiHalfVectorGround ), 0.0 ), 5.0 );\n\t\t\themiSpecular += hemiColor * specularNormalization * ( schlickSky * hemiSpecularWeightSky * max( dotProduct, 0.0 ) + schlickGround * hemiSpecularWeightGround * max( dotProductGround, 0.0 ) );\n\t\t}\n\t#endif\n\tvec3 totalDiffuse = vec3( 0.0 );\n\tvec3 totalSpecular = vec3( 0.0 );\n\t#if MAX_DIR_LIGHTS > 0\n\t\ttotalDiffuse += dirDiffuse;\n\t\ttotalSpecular += dirSpecular;\n\t#endif\n\t#if MAX_HEMI_LIGHTS > 0\n\t\ttotalDiffuse += hemiDiffuse;\n\t\ttotalSpecular += hemiSpecular;\n\t#endif\n\t#if MAX_POINT_LIGHTS > 0\n\t\ttotalDiffuse += pointDiffuse;\n\t\ttotalSpecular += 
pointSpecular;\n\t#endif\n\t#if MAX_SPOT_LIGHTS > 0\n\t\ttotalDiffuse += spotDiffuse;\n\t\ttotalSpecular += spotSpecular;\n\t#endif\n\t#ifdef METAL\n\t\tgl_FragColor.xyz = gl_FragColor.xyz * ( totalDiffuse + ambientLightColor * ambient + totalSpecular );\n\t#else\n\t\tgl_FragColor.xyz = gl_FragColor.xyz * ( totalDiffuse + ambientLightColor * ambient ) + totalSpecular;\n\t#endif\n\tif ( enableReflection ) {\n\t\tvec3 vReflect;\n\t\tvec3 cameraToVertex = normalize( vWorldPosition - cameraPosition );\n\t\tif ( useRefract ) {\n\t\t\tvReflect = refract( cameraToVertex, normal, refractionRatio );\n\t\t} else {\n\t\t\tvReflect = reflect( cameraToVertex, normal );\n\t\t}\n\t\tvec4 cubeColor = textureCube( tCube, vec3( -vReflect.x, vReflect.yz ) );\n\t\t#ifdef GAMMA_INPUT\n\t\t\tcubeColor.xyz *= cubeColor.xyz;\n\t\t#endif\n\t\tgl_FragColor.xyz = mix( gl_FragColor.xyz, cubeColor.xyz, specularTex.r * reflectivity );\n\t}", +THREE.ShaderChunk.shadowmap_fragment,THREE.ShaderChunk.linear_to_gamma_fragment,THREE.ShaderChunk.fog_fragment,"}"].join("\n"),vertexShader:["attribute vec4 tangent;\nuniform vec2 uOffset;\nuniform vec2 uRepeat;\nuniform bool enableDisplacement;\n#ifdef VERTEX_TEXTURES\n\tuniform sampler2D tDisplacement;\n\tuniform float uDisplacementScale;\n\tuniform float uDisplacementBias;\n#endif\nvarying vec3 vTangent;\nvarying vec3 vBinormal;\nvarying vec3 vNormal;\nvarying vec2 vUv;\nvarying vec3 vWorldPosition;\nvarying vec3 vViewPosition;", +THREE.ShaderChunk.skinning_pars_vertex,THREE.ShaderChunk.shadowmap_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {",THREE.ShaderChunk.skinbase_vertex,THREE.ShaderChunk.skinnormal_vertex,"\t#ifdef USE_SKINNING\n\t\tvNormal = normalize( normalMatrix * skinnedNormal.xyz );\n\t\tvec4 skinnedTangent = skinMatrix * vec4( tangent.xyz, 0.0 );\n\t\tvTangent = normalize( normalMatrix * skinnedTangent.xyz );\n\t#else\n\t\tvNormal = normalize( normalMatrix * normal );\n\t\tvTangent = normalize( normalMatrix * 
tangent.xyz );\n\t#endif\n\tvBinormal = normalize( cross( vNormal, vTangent ) * tangent.w );\n\tvUv = uv * uRepeat + uOffset;\n\tvec3 displacedPosition;\n\t#ifdef VERTEX_TEXTURES\n\t\tif ( enableDisplacement ) {\n\t\t\tvec3 dv = texture2D( tDisplacement, uv ).xyz;\n\t\t\tfloat df = uDisplacementScale * dv.x + uDisplacementBias;\n\t\t\tdisplacedPosition = position + normalize( normal ) * df;\n\t\t} else {\n\t\t\t#ifdef USE_SKINNING\n\t\t\t\tvec4 skinVertex = bindMatrix * vec4( position, 1.0 );\n\t\t\t\tvec4 skinned = vec4( 0.0 );\n\t\t\t\tskinned += boneMatX * skinVertex * skinWeight.x;\n\t\t\t\tskinned += boneMatY * skinVertex * skinWeight.y;\n\t\t\t\tskinned += boneMatZ * skinVertex * skinWeight.z;\n\t\t\t\tskinned += boneMatW * skinVertex * skinWeight.w;\n\t\t\t\tskinned = bindMatrixInverse * skinned;\n\t\t\t\tdisplacedPosition = skinned.xyz;\n\t\t\t#else\n\t\t\t\tdisplacedPosition = position;\n\t\t\t#endif\n\t\t}\n\t#else\n\t\t#ifdef USE_SKINNING\n\t\t\tvec4 skinVertex = bindMatrix * vec4( position, 1.0 );\n\t\t\tvec4 skinned = vec4( 0.0 );\n\t\t\tskinned += boneMatX * skinVertex * skinWeight.x;\n\t\t\tskinned += boneMatY * skinVertex * skinWeight.y;\n\t\t\tskinned += boneMatZ * skinVertex * skinWeight.z;\n\t\t\tskinned += boneMatW * skinVertex * skinWeight.w;\n\t\t\tskinned = bindMatrixInverse * skinned;\n\t\t\tdisplacedPosition = skinned.xyz;\n\t\t#else\n\t\t\tdisplacedPosition = position;\n\t\t#endif\n\t#endif\n\tvec4 mvPosition = modelViewMatrix * vec4( displacedPosition, 1.0 );\n\tvec4 worldPosition = modelMatrix * vec4( displacedPosition, 1.0 );\n\tgl_Position = projectionMatrix * mvPosition;", +THREE.ShaderChunk.logdepthbuf_vertex,"\tvWorldPosition = worldPosition.xyz;\n\tvViewPosition = -mvPosition.xyz;\n\t#ifdef USE_SHADOWMAP\n\t\tfor( int i = 0; i < MAX_SHADOWS; i ++ ) {\n\t\t\tvShadowCoord[ i ] = shadowMatrix[ i ] * 
worldPosition;\n\t\t}\n\t#endif\n}"].join("\n")},cube:{uniforms:{tCube:{type:"t",value:null},tFlip:{type:"f",value:-1}},vertexShader:["varying vec3 vWorldPosition;",THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {\n\tvec4 worldPosition = modelMatrix * vec4( position, 1.0 );\n\tvWorldPosition = worldPosition.xyz;\n\tgl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );", +THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:["uniform samplerCube tCube;\nuniform float tFlip;\nvarying vec3 vWorldPosition;",THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {\n\tgl_FragColor = textureCube( tCube, vec3( tFlip * vWorldPosition.x, vWorldPosition.yz ) );",THREE.ShaderChunk.logdepthbuf_fragment,"}"].join("\n")},depthRGBA:{uniforms:{},vertexShader:[THREE.ShaderChunk.morphtarget_pars_vertex,THREE.ShaderChunk.skinning_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex, +"void main() {",THREE.ShaderChunk.skinbase_vertex,THREE.ShaderChunk.morphtarget_vertex,THREE.ShaderChunk.skinning_vertex,THREE.ShaderChunk.default_vertex,THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:[THREE.ShaderChunk.logdepthbuf_pars_fragment,"vec4 pack_depth( const in float depth ) {\n\tconst vec4 bit_shift = vec4( 256.0 * 256.0 * 256.0, 256.0 * 256.0, 256.0, 1.0 );\n\tconst vec4 bit_mask = vec4( 0.0, 1.0 / 256.0, 1.0 / 256.0, 1.0 / 256.0 );\n\tvec4 res = mod( depth * bit_shift * vec4( 255 ), vec4( 256 ) ) / vec4( 255 );\n\tres -= res.xxyz * bit_mask;\n\treturn res;\n}\nvoid main() {", +THREE.ShaderChunk.logdepthbuf_fragment,"\t#ifdef USE_LOGDEPTHBUF_EXT\n\t\tgl_FragData[ 0 ] = pack_depth( gl_FragDepthEXT );\n\t#else\n\t\tgl_FragData[ 0 ] = pack_depth( gl_FragCoord.z );\n\t#endif\n}"].join("\n")}}; +THREE.WebGLRenderer=function(a){function b(a){var b=a.geometry;a=a.material;var c=b.vertices.length;if(a.attributes){void 0===b.__webglCustomAttributesList&&(b.__webglCustomAttributesList=[]);for(var d in a.attributes){var 
e=a.attributes[d];if(!e.__webglInitialized||e.createUniqueBuffers){e.__webglInitialized=!0;var f=1;"v2"===e.type?f=2:"v3"===e.type?f=3:"v4"===e.type?f=4:"c"===e.type&&(f=3);e.size=f;e.array=new Float32Array(c*f);e.buffer=l.createBuffer();e.buffer.belongsToAttribute=d;e.needsUpdate= +!0}b.__webglCustomAttributesList.push(e)}}}function c(a,b){var c=b.geometry,e=a.faces3,f=3*e.length,g=1*e.length,h=3*e.length,e=d(b,a);a.__vertexArray=new Float32Array(3*f);a.__normalArray=new Float32Array(3*f);a.__colorArray=new Float32Array(3*f);a.__uvArray=new Float32Array(2*f);1Aa;Aa++)Cb=ma[Aa],Ta[Sa]=Cb.x,Ta[Sa+1]=Cb.y,Ta[Sa+2]=Cb.z,Sa+=3;else for(Aa=0;3>Aa;Aa++)Ta[Sa]=pa.x,Ta[Sa+1]=pa.y,Ta[Sa+2]=pa.z,Sa+=3;l.bindBuffer(l.ARRAY_BUFFER,C.__webglNormalBuffer);l.bufferData(l.ARRAY_BUFFER, +Ta,S)}if(Kc&&ua){M=0;for(ea=N.length;MAa;Aa++)Oa=hb[Aa],sb[qb]=Oa.x,sb[qb+1]=Oa.y,qb+=2;0Aa;Aa++)Qb=za[Aa],fb[rb]=Qb.x,fb[rb+1]=Qb.y,rb+=2;0h&&(f[v].counter+=1,k=f[v].hash+"_"+f[v].counter,k in r||(p={id:rc++, +faces3:[],materialIndex:v,vertices:0,numMorphTargets:m,numMorphNormals:n},r[k]=p,q.push(p)));r[k].faces3.push(t);r[k].vertices+=3}a[g]=q;d.groupsNeedUpdate=!1}a=xb[d.id];g=0;for(e=a.length;gDa;Da++)kb[Da]=!J.autoScaleCubemaps||Ob||Tb?Tb?ua.image[Da].image:ua.image[Da]:R(ua.image[Da],$c);var ka=kb[0],Zb=THREE.Math.isPowerOfTwo(ka.width)&&THREE.Math.isPowerOfTwo(ka.height),ab=Q(ua.format),Fb=Q(ua.type);F(l.TEXTURE_CUBE_MAP,ua,Zb);for(Da=0;6>Da;Da++)if(Ob)for(var gb,$b=kb[Da].mipmaps,ga=0,Xb=$b.length;ga=Oc&&console.warn("WebGLRenderer: trying to use "+a+" texture units while this GPU supports only "+ +Oc);dc+=1;return a}function x(a,b){a._modelViewMatrix.multiplyMatrices(b.matrixWorldInverse,a.matrixWorld);a._normalMatrix.getNormalMatrix(a._modelViewMatrix)}function D(a,b,c,d){a[b]=c.r*c.r*d;a[b+1]=c.g*c.g*d;a[b+2]=c.b*c.b*d}function E(a,b,c,d){a[b]=c.r*d;a[b+1]=c.g*d;a[b+2]=c.b*d}function A(a){a!==Pc&&(l.lineWidth(a),Pc=a)}function 
B(a,b,c){Qc!==a&&(a?l.enable(l.POLYGON_OFFSET_FILL):l.disable(l.POLYGON_OFFSET_FILL),Qc=a);!a||Rc===b&&Sc===c||(l.polygonOffset(b,c),Rc=b,Sc=c)}function F(a,b,c){c? +(l.texParameteri(a,l.TEXTURE_WRAP_S,Q(b.wrapS)),l.texParameteri(a,l.TEXTURE_WRAP_T,Q(b.wrapT)),l.texParameteri(a,l.TEXTURE_MAG_FILTER,Q(b.magFilter)),l.texParameteri(a,l.TEXTURE_MIN_FILTER,Q(b.minFilter))):(l.texParameteri(a,l.TEXTURE_WRAP_S,l.CLAMP_TO_EDGE),l.texParameteri(a,l.TEXTURE_WRAP_T,l.CLAMP_TO_EDGE),l.texParameteri(a,l.TEXTURE_MAG_FILTER,T(b.magFilter)),l.texParameteri(a,l.TEXTURE_MIN_FILTER,T(b.minFilter)));(c=pa.get("EXT_texture_filter_anisotropic"))&&b.type!==THREE.FloatType&&(1b||a.height>b){var c=b/Math.max(a.width,a.height),d=document.createElement("canvas");d.width=Math.floor(a.width*c);d.height=Math.floor(a.height*c);d.getContext("2d").drawImage(a,0,0,a.width,a.height,0,0,d.width,d.height);console.log("THREE.WebGLRenderer:",a,"is too big ("+a.width+"x"+a.height+"). Resized to "+d.width+"x"+d.height+ +".");return d}return a}function H(a,b){l.bindRenderbuffer(l.RENDERBUFFER,a);b.depthBuffer&&!b.stencilBuffer?(l.renderbufferStorage(l.RENDERBUFFER,l.DEPTH_COMPONENT16,b.width,b.height),l.framebufferRenderbuffer(l.FRAMEBUFFER,l.DEPTH_ATTACHMENT,l.RENDERBUFFER,a)):b.depthBuffer&&b.stencilBuffer?(l.renderbufferStorage(l.RENDERBUFFER,l.DEPTH_STENCIL,b.width,b.height),l.framebufferRenderbuffer(l.FRAMEBUFFER,l.DEPTH_STENCIL_ATTACHMENT,l.RENDERBUFFER,a)):l.renderbufferStorage(l.RENDERBUFFER,l.RGBA4,b.width, +b.height)}function C(a){a instanceof THREE.WebGLRenderTargetCube?(l.bindTexture(l.TEXTURE_CUBE_MAP,a.__webglTexture),l.generateMipmap(l.TEXTURE_CUBE_MAP),l.bindTexture(l.TEXTURE_CUBE_MAP,null)):(l.bindTexture(l.TEXTURE_2D,a.__webglTexture),l.generateMipmap(l.TEXTURE_2D),l.bindTexture(l.TEXTURE_2D,null))}function T(a){return a===THREE.NearestFilter||a===THREE.NearestMipMapNearestFilter||a===THREE.NearestMipMapLinearFilter?l.NEAREST:l.LINEAR}function Q(a){var 
b;if(a===THREE.RepeatWrapping)return l.REPEAT; +if(a===THREE.ClampToEdgeWrapping)return l.CLAMP_TO_EDGE;if(a===THREE.MirroredRepeatWrapping)return l.MIRRORED_REPEAT;if(a===THREE.NearestFilter)return l.NEAREST;if(a===THREE.NearestMipMapNearestFilter)return l.NEAREST_MIPMAP_NEAREST;if(a===THREE.NearestMipMapLinearFilter)return l.NEAREST_MIPMAP_LINEAR;if(a===THREE.LinearFilter)return l.LINEAR;if(a===THREE.LinearMipMapNearestFilter)return l.LINEAR_MIPMAP_NEAREST;if(a===THREE.LinearMipMapLinearFilter)return l.LINEAR_MIPMAP_LINEAR;if(a===THREE.UnsignedByteType)return l.UNSIGNED_BYTE; +if(a===THREE.UnsignedShort4444Type)return l.UNSIGNED_SHORT_4_4_4_4;if(a===THREE.UnsignedShort5551Type)return l.UNSIGNED_SHORT_5_5_5_1;if(a===THREE.UnsignedShort565Type)return l.UNSIGNED_SHORT_5_6_5;if(a===THREE.ByteType)return l.BYTE;if(a===THREE.ShortType)return l.SHORT;if(a===THREE.UnsignedShortType)return l.UNSIGNED_SHORT;if(a===THREE.IntType)return l.INT;if(a===THREE.UnsignedIntType)return l.UNSIGNED_INT;if(a===THREE.FloatType)return l.FLOAT;if(a===THREE.AlphaFormat)return l.ALPHA;if(a===THREE.RGBFormat)return l.RGB; +if(a===THREE.RGBAFormat)return l.RGBA;if(a===THREE.LuminanceFormat)return l.LUMINANCE;if(a===THREE.LuminanceAlphaFormat)return l.LUMINANCE_ALPHA;if(a===THREE.AddEquation)return l.FUNC_ADD;if(a===THREE.SubtractEquation)return l.FUNC_SUBTRACT;if(a===THREE.ReverseSubtractEquation)return l.FUNC_REVERSE_SUBTRACT;if(a===THREE.ZeroFactor)return l.ZERO;if(a===THREE.OneFactor)return l.ONE;if(a===THREE.SrcColorFactor)return l.SRC_COLOR;if(a===THREE.OneMinusSrcColorFactor)return l.ONE_MINUS_SRC_COLOR;if(a=== +THREE.SrcAlphaFactor)return l.SRC_ALPHA;if(a===THREE.OneMinusSrcAlphaFactor)return l.ONE_MINUS_SRC_ALPHA;if(a===THREE.DstAlphaFactor)return l.DST_ALPHA;if(a===THREE.OneMinusDstAlphaFactor)return l.ONE_MINUS_DST_ALPHA;if(a===THREE.DstColorFactor)return l.DST_COLOR;if(a===THREE.OneMinusDstColorFactor)return l.ONE_MINUS_DST_COLOR;if(a===THREE.SrcAlphaSaturateFactor)return 
l.SRC_ALPHA_SATURATE;b=pa.get("WEBGL_compressed_texture_s3tc");if(null!==b){if(a===THREE.RGB_S3TC_DXT1_Format)return b.COMPRESSED_RGB_S3TC_DXT1_EXT; +if(a===THREE.RGBA_S3TC_DXT1_Format)return b.COMPRESSED_RGBA_S3TC_DXT1_EXT;if(a===THREE.RGBA_S3TC_DXT3_Format)return b.COMPRESSED_RGBA_S3TC_DXT3_EXT;if(a===THREE.RGBA_S3TC_DXT5_Format)return b.COMPRESSED_RGBA_S3TC_DXT5_EXT}b=pa.get("WEBGL_compressed_texture_pvrtc");if(null!==b){if(a===THREE.RGB_PVRTC_4BPPV1_Format)return b.COMPRESSED_RGB_PVRTC_4BPPV1_IMG;if(a===THREE.RGB_PVRTC_2BPPV1_Format)return b.COMPRESSED_RGB_PVRTC_2BPPV1_IMG;if(a===THREE.RGBA_PVRTC_4BPPV1_Format)return b.COMPRESSED_RGBA_PVRTC_4BPPV1_IMG; +if(a===THREE.RGBA_PVRTC_2BPPV1_Format)return b.COMPRESSED_RGBA_PVRTC_2BPPV1_IMG}b=pa.get("EXT_blend_minmax");if(null!==b){if(a===THREE.MinEquation)return b.MIN_EXT;if(a===THREE.MaxEquation)return b.MAX_EXT}return 0}console.log("THREE.WebGLRenderer",THREE.REVISION);a=a||{};var O=void 0!==a.canvas?a.canvas:document.createElement("canvas"),S=void 0!==a.context?a.context:null,X=void 0!==a.precision?a.precision:"highp",Y=void 0!==a.alpha?a.alpha:!1,la=void 0!==a.depth?a.depth:!0,ma=void 0!==a.stencil? 
+a.stencil:!0,ya=void 0!==a.antialias?a.antialias:!1,P=void 0!==a.premultipliedAlpha?a.premultipliedAlpha:!0,Ga=void 0!==a.preserveDrawingBuffer?a.preserveDrawingBuffer:!1,Fa=void 0!==a.logarithmicDepthBuffer?a.logarithmicDepthBuffer:!1,za=new THREE.Color(0),bb=0,cb=[],ob={},jb=[],Jb=[],Ib=[],yb=[],Ra=[];this.domElement=O;this.context=null;this.devicePixelRatio=void 0!==a.devicePixelRatio?a.devicePixelRatio:void 0!==self.devicePixelRatio?self.devicePixelRatio:1;this.sortObjects=this.autoClearStencil= +this.autoClearDepth=this.autoClearColor=this.autoClear=!0;this.shadowMapEnabled=this.gammaOutput=this.gammaInput=!1;this.shadowMapType=THREE.PCFShadowMap;this.shadowMapCullFace=THREE.CullFaceFront;this.shadowMapCascade=this.shadowMapDebug=!1;this.maxMorphTargets=8;this.maxMorphNormals=4;this.autoScaleCubemaps=!0;this.info={memory:{programs:0,geometries:0,textures:0},render:{calls:0,vertices:0,faces:0,points:0}};var J=this,hb=[],tc=null,Tc=null,Kb=-1,Oa=-1,ec=null,dc=0,Lb=-1,Mb=-1,pb=-1,Nb=-1,Ob=-1, +Xb=-1,Yb=-1,nb=-1,Qc=null,Rc=null,Sc=null,Pc=null,Pb=0,kc=0,lc=O.width,mc=O.height,Uc=0,Vc=0,wb=new Uint8Array(16),ib=new Uint8Array(16),Ec=new THREE.Frustum,Ac=new THREE.Matrix4,Gc=new THREE.Matrix4,Na=new THREE.Vector3,sa=new THREE.Vector3,fc=!0,Mc={ambient:[0,0,0],directional:{length:0,colors:[],positions:[]},point:{length:0,colors:[],positions:[],distances:[]},spot:{length:0,colors:[],positions:[],distances:[],directions:[],anglesCos:[],exponents:[]},hemi:{length:0,skyColors:[],groundColors:[], +positions:[]}},l;try{var Wc={alpha:Y,depth:la,stencil:ma,antialias:ya,premultipliedAlpha:P,preserveDrawingBuffer:Ga};l=S||O.getContext("webgl",Wc)||O.getContext("experimental-webgl",Wc);if(null===l){if(null!==O.getContext("webgl"))throw"Error creating WebGL context with your selected attributes.";throw"Error creating WebGL context.";}}catch(ad){console.error(ad)}void 
0===l.getShaderPrecisionFormat&&(l.getShaderPrecisionFormat=function(){return{rangeMin:1,rangeMax:1,precision:1}});var pa=new THREE.WebGLExtensions(l); +pa.get("OES_texture_float");pa.get("OES_texture_float_linear");pa.get("OES_standard_derivatives");Fa&&pa.get("EXT_frag_depth");l.clearColor(0,0,0,1);l.clearDepth(1);l.clearStencil(0);l.enable(l.DEPTH_TEST);l.depthFunc(l.LEQUAL);l.frontFace(l.CCW);l.cullFace(l.BACK);l.enable(l.CULL_FACE);l.enable(l.BLEND);l.blendEquation(l.FUNC_ADD);l.blendFunc(l.SRC_ALPHA,l.ONE_MINUS_SRC_ALPHA);l.viewport(Pb,kc,lc,mc);l.clearColor(za.r,za.g,za.b,bb);this.context=l;var Oc=l.getParameter(l.MAX_TEXTURE_IMAGE_UNITS), +bd=l.getParameter(l.MAX_VERTEX_TEXTURE_IMAGE_UNITS),cd=l.getParameter(l.MAX_TEXTURE_SIZE),$c=l.getParameter(l.MAX_CUBE_MAP_TEXTURE_SIZE),sc=0b;b++)l.deleteFramebuffer(a.__webglFramebuffer[b]),l.deleteRenderbuffer(a.__webglRenderbuffer[b]); +else l.deleteFramebuffer(a.__webglFramebuffer),l.deleteRenderbuffer(a.__webglRenderbuffer);delete a.__webglFramebuffer;delete a.__webglRenderbuffer}J.info.memory.textures--},Dc=function(a){a=a.target;a.removeEventListener("dispose",Dc);Cc(a)},Yc=function(a){for(var b="__webglVertexBuffer __webglNormalBuffer __webglTangentBuffer __webglColorBuffer __webglUVBuffer __webglUV2Buffer __webglSkinIndicesBuffer __webglSkinWeightsBuffer __webglFaceBuffer __webglLineBuffer __webglLineDistanceBuffer".split(" "), +c=0,d=b.length;cd.numSupportedMorphTargets?(n.sort(p),n.length=d.numSupportedMorphTargets):n.length>d.numSupportedMorphNormals?n.sort(p):0===n.length&&n.push([0, +0]);for(m=0;mf;f++){a.__webglFramebuffer[f]=l.createFramebuffer();a.__webglRenderbuffer[f]=l.createRenderbuffer();l.texImage2D(l.TEXTURE_CUBE_MAP_POSITIVE_X+f,0,d,a.width,a.height,0,d,e,null);var 
g=a,h=l.TEXTURE_CUBE_MAP_POSITIVE_X+f;l.bindFramebuffer(l.FRAMEBUFFER,a.__webglFramebuffer[f]);l.framebufferTexture2D(l.FRAMEBUFFER,l.COLOR_ATTACHMENT0,h,g.__webglTexture,0);H(a.__webglRenderbuffer[f],a)}c&&l.generateMipmap(l.TEXTURE_CUBE_MAP)}else a.__webglFramebuffer= +l.createFramebuffer(),a.__webglRenderbuffer=a.shareDepthFrom?a.shareDepthFrom.__webglRenderbuffer:l.createRenderbuffer(),l.bindTexture(l.TEXTURE_2D,a.__webglTexture),F(l.TEXTURE_2D,a,c),l.texImage2D(l.TEXTURE_2D,0,d,a.width,a.height,0,d,e,null),d=l.TEXTURE_2D,l.bindFramebuffer(l.FRAMEBUFFER,a.__webglFramebuffer),l.framebufferTexture2D(l.FRAMEBUFFER,l.COLOR_ATTACHMENT0,d,a.__webglTexture,0),a.shareDepthFrom?a.depthBuffer&&!a.stencilBuffer?l.framebufferRenderbuffer(l.FRAMEBUFFER,l.DEPTH_ATTACHMENT, +l.RENDERBUFFER,a.__webglRenderbuffer):a.depthBuffer&&a.stencilBuffer&&l.framebufferRenderbuffer(l.FRAMEBUFFER,l.DEPTH_STENCIL_ATTACHMENT,l.RENDERBUFFER,a.__webglRenderbuffer):H(a.__webglRenderbuffer,a),c&&l.generateMipmap(l.TEXTURE_2D);b?l.bindTexture(l.TEXTURE_CUBE_MAP,null):l.bindTexture(l.TEXTURE_2D,null);l.bindRenderbuffer(l.RENDERBUFFER,null);l.bindFramebuffer(l.FRAMEBUFFER,null)}a?(b=b?a.__webglFramebuffer[a.activeCubeFace]:a.__webglFramebuffer,c=a.width,a=a.height,e=d=0):(b=null,c=lc,a=mc, +d=Pb,e=kc);b!==Tc&&(l.bindFramebuffer(l.FRAMEBUFFER,b),l.viewport(d,e,c,a),Tc=b);Uc=c;Vc=a};this.initMaterial=function(){console.warn("THREE.WebGLRenderer: .initMaterial() has been removed.")};this.addPrePlugin=function(){console.warn("THREE.WebGLRenderer: .addPrePlugin() has been removed.")};this.addPostPlugin=function(){console.warn("THREE.WebGLRenderer: .addPostPlugin() has been removed.")};this.updateShadowMap=function(){console.warn("THREE.WebGLRenderer: .updateShadowMap() has been removed.")}}; +THREE.WebGLRenderTarget=function(a,b,c){this.width=a;this.height=b;c=c||{};this.wrapS=void 0!==c.wrapS?c.wrapS:THREE.ClampToEdgeWrapping;this.wrapT=void 
0!==c.wrapT?c.wrapT:THREE.ClampToEdgeWrapping;this.magFilter=void 0!==c.magFilter?c.magFilter:THREE.LinearFilter;this.minFilter=void 0!==c.minFilter?c.minFilter:THREE.LinearMipMapLinearFilter;this.anisotropy=void 0!==c.anisotropy?c.anisotropy:1;this.offset=new THREE.Vector2(0,0);this.repeat=new THREE.Vector2(1,1);this.format=void 0!==c.format?c.format: +THREE.RGBAFormat;this.type=void 0!==c.type?c.type:THREE.UnsignedByteType;this.depthBuffer=void 0!==c.depthBuffer?c.depthBuffer:!0;this.stencilBuffer=void 0!==c.stencilBuffer?c.stencilBuffer:!0;this.generateMipmaps=!0;this.shareDepthFrom=null}; +THREE.WebGLRenderTarget.prototype={constructor:THREE.WebGLRenderTarget,setSize:function(a,b){this.width=a;this.height=b},clone:function(){var a=new THREE.WebGLRenderTarget(this.width,this.height);a.wrapS=this.wrapS;a.wrapT=this.wrapT;a.magFilter=this.magFilter;a.minFilter=this.minFilter;a.anisotropy=this.anisotropy;a.offset.copy(this.offset);a.repeat.copy(this.repeat);a.format=this.format;a.type=this.type;a.depthBuffer=this.depthBuffer;a.stencilBuffer=this.stencilBuffer;a.generateMipmaps=this.generateMipmaps; +a.shareDepthFrom=this.shareDepthFrom;return a},dispose:function(){this.dispatchEvent({type:"dispose"})}};THREE.EventDispatcher.prototype.apply(THREE.WebGLRenderTarget.prototype);THREE.WebGLRenderTargetCube=function(a,b,c){THREE.WebGLRenderTarget.call(this,a,b,c);this.activeCubeFace=0};THREE.WebGLRenderTargetCube.prototype=Object.create(THREE.WebGLRenderTarget.prototype); +THREE.WebGLExtensions=function(a){var b={};this.get=function(c){if(void 0!==b[c])return b[c];var d;switch(c){case "OES_texture_float":d=a.getExtension("OES_texture_float");break;case "OES_texture_float_linear":d=a.getExtension("OES_texture_float_linear");break;case "OES_standard_derivatives":d=a.getExtension("OES_standard_derivatives");break;case 
"EXT_texture_filter_anisotropic":d=a.getExtension("EXT_texture_filter_anisotropic")||a.getExtension("MOZ_EXT_texture_filter_anisotropic")||a.getExtension("WEBKIT_EXT_texture_filter_anisotropic"); +break;case "WEBGL_compressed_texture_s3tc":d=a.getExtension("WEBGL_compressed_texture_s3tc")||a.getExtension("MOZ_WEBGL_compressed_texture_s3tc")||a.getExtension("WEBKIT_WEBGL_compressed_texture_s3tc");break;case "WEBGL_compressed_texture_pvrtc":d=a.getExtension("WEBGL_compressed_texture_pvrtc")||a.getExtension("WEBKIT_WEBGL_compressed_texture_pvrtc");break;case "OES_element_index_uint":d=a.getExtension("OES_element_index_uint");break;case "EXT_blend_minmax":d=a.getExtension("EXT_blend_minmax");break; +case "EXT_frag_depth":d=a.getExtension("EXT_frag_depth")}null===d&&console.log("THREE.WebGLRenderer: "+c+" extension not supported.");return b[c]=d}}; +THREE.WebGLProgram=function(){var a=0;return function(b,c,d,e){var f=b.context,g=d.defines,h=d.__webglShader.uniforms,k=d.attributes,n=d.__webglShader.vertexShader,p=d.__webglShader.fragmentShader,q=d.index0AttributeName;void 0===q&&!0===e.morphTargets&&(q="position");var m="SHADOWMAP_TYPE_BASIC";e.shadowMapType===THREE.PCFShadowMap?m="SHADOWMAP_TYPE_PCF":e.shadowMapType===THREE.PCFSoftShadowMap&&(m="SHADOWMAP_TYPE_PCF_SOFT");var r,t;r=[];for(var s in g)t=g[s],!1!==t&&(t="#define "+s+" "+t,r.push(t)); +r=r.join("\n");g=f.createProgram();d instanceof THREE.RawShaderMaterial?b=d="":(d=["precision "+e.precision+" float;","precision "+e.precision+" int;",r,e.supportsVertexTextures?"#define VERTEX_TEXTURES":"",b.gammaInput?"#define GAMMA_INPUT":"",b.gammaOutput?"#define GAMMA_OUTPUT":"","#define MAX_DIR_LIGHTS "+e.maxDirLights,"#define MAX_POINT_LIGHTS "+e.maxPointLights,"#define MAX_SPOT_LIGHTS "+e.maxSpotLights,"#define MAX_HEMI_LIGHTS "+e.maxHemiLights,"#define MAX_SHADOWS "+e.maxShadows,"#define MAX_BONES "+ +e.maxBones,e.map?"#define USE_MAP":"",e.envMap?"#define USE_ENVMAP":"",e.lightMap?"#define 
USE_LIGHTMAP":"",e.bumpMap?"#define USE_BUMPMAP":"",e.normalMap?"#define USE_NORMALMAP":"",e.specularMap?"#define USE_SPECULARMAP":"",e.alphaMap?"#define USE_ALPHAMAP":"",e.vertexColors?"#define USE_COLOR":"",e.skinning?"#define USE_SKINNING":"",e.useVertexTexture?"#define BONE_TEXTURE":"",e.morphTargets?"#define USE_MORPHTARGETS":"",e.morphNormals?"#define USE_MORPHNORMALS":"",e.wrapAround?"#define WRAP_AROUND": +"",e.doubleSided?"#define DOUBLE_SIDED":"",e.flipSided?"#define FLIP_SIDED":"",e.shadowMapEnabled?"#define USE_SHADOWMAP":"",e.shadowMapEnabled?"#define "+m:"",e.shadowMapDebug?"#define SHADOWMAP_DEBUG":"",e.shadowMapCascade?"#define SHADOWMAP_CASCADE":"",e.sizeAttenuation?"#define USE_SIZEATTENUATION":"",e.logarithmicDepthBuffer?"#define USE_LOGDEPTHBUF":"","uniform mat4 modelMatrix;\nuniform mat4 modelViewMatrix;\nuniform mat4 projectionMatrix;\nuniform mat4 viewMatrix;\nuniform mat3 normalMatrix;\nuniform vec3 cameraPosition;\nattribute vec3 position;\nattribute vec3 normal;\nattribute vec2 uv;\nattribute vec2 uv2;\n#ifdef USE_COLOR\n\tattribute vec3 color;\n#endif\n#ifdef USE_MORPHTARGETS\n\tattribute vec3 morphTarget0;\n\tattribute vec3 morphTarget1;\n\tattribute vec3 morphTarget2;\n\tattribute vec3 morphTarget3;\n\t#ifdef USE_MORPHNORMALS\n\t\tattribute vec3 morphNormal0;\n\t\tattribute vec3 morphNormal1;\n\t\tattribute vec3 morphNormal2;\n\t\tattribute vec3 morphNormal3;\n\t#else\n\t\tattribute vec3 morphTarget4;\n\t\tattribute vec3 morphTarget5;\n\t\tattribute vec3 morphTarget6;\n\t\tattribute vec3 morphTarget7;\n\t#endif\n#endif\n#ifdef USE_SKINNING\n\tattribute vec4 skinIndex;\n\tattribute vec4 skinWeight;\n#endif\n"].join("\n"), +b=["precision "+e.precision+" float;","precision "+e.precision+" int;",e.bumpMap||e.normalMap?"#extension GL_OES_standard_derivatives : enable":"",r,"#define MAX_DIR_LIGHTS "+e.maxDirLights,"#define MAX_POINT_LIGHTS "+e.maxPointLights,"#define MAX_SPOT_LIGHTS "+e.maxSpotLights,"#define MAX_HEMI_LIGHTS 
"+e.maxHemiLights,"#define MAX_SHADOWS "+e.maxShadows,e.alphaTest?"#define ALPHATEST "+e.alphaTest:"",b.gammaInput?"#define GAMMA_INPUT":"",b.gammaOutput?"#define GAMMA_OUTPUT":"",e.useFog&&e.fog?"#define USE_FOG": +"",e.useFog&&e.fogExp?"#define FOG_EXP2":"",e.map?"#define USE_MAP":"",e.envMap?"#define USE_ENVMAP":"",e.lightMap?"#define USE_LIGHTMAP":"",e.bumpMap?"#define USE_BUMPMAP":"",e.normalMap?"#define USE_NORMALMAP":"",e.specularMap?"#define USE_SPECULARMAP":"",e.alphaMap?"#define USE_ALPHAMAP":"",e.vertexColors?"#define USE_COLOR":"",e.metal?"#define METAL":"",e.wrapAround?"#define WRAP_AROUND":"",e.doubleSided?"#define DOUBLE_SIDED":"",e.flipSided?"#define FLIP_SIDED":"",e.shadowMapEnabled?"#define USE_SHADOWMAP": +"",e.shadowMapEnabled?"#define "+m:"",e.shadowMapDebug?"#define SHADOWMAP_DEBUG":"",e.shadowMapCascade?"#define SHADOWMAP_CASCADE":"",e.logarithmicDepthBuffer?"#define USE_LOGDEPTHBUF":"","uniform mat4 viewMatrix;\nuniform vec3 cameraPosition;\n"].join("\n"));n=new THREE.WebGLShader(f,f.VERTEX_SHADER,d+n);p=new THREE.WebGLShader(f,f.FRAGMENT_SHADER,b+p);f.attachShader(g,n);f.attachShader(g,p);void 0!==q&&f.bindAttribLocation(g,0,q);f.linkProgram(g);!1===f.getProgramParameter(g,f.LINK_STATUS)&&(console.error("THREE.WebGLProgram: Could not initialise shader."), +console.error("gl.VALIDATE_STATUS",f.getProgramParameter(g,f.VALIDATE_STATUS)),console.error("gl.getError()",f.getError()));""!==f.getProgramInfoLog(g)&&console.warn("THREE.WebGLProgram: gl.getProgramInfoLog()",f.getProgramInfoLog(g));f.deleteShader(n);f.deleteShader(p);q="viewMatrix modelViewMatrix projectionMatrix normalMatrix modelMatrix cameraPosition morphTargetInfluences bindMatrix bindMatrixInverse".split(" ");e.useVertexTexture?(q.push("boneTexture"),q.push("boneTextureWidth"),q.push("boneTextureHeight")): +q.push("boneGlobalMatrices");e.logarithmicDepthBuffer&&q.push("logDepthBufFC");for(var u in h)q.push(u);h=q;u={};q=0;for(b=h.length;qT;T++)F[T]=new THREE.Vector3,A[T]=new 
THREE.Vector3;F=B.shadowCascadeNearZ[C];B=B.shadowCascadeFarZ[C];A[0].set(-1,-1,F);A[1].set(1,-1,F);A[2].set(-1,1,F);A[3].set(1,1,F);A[4].set(-1,-1,B);A[5].set(1,-1,B);A[6].set(-1,1,B);A[7].set(1,1,B);H.originalCamera=v;A=new THREE.Gyroscope;A.position.copy(x.shadowCascadeOffset);A.add(H);A.add(H.target);v.add(A);x.shadowCascadeArray[E]=H;console.log("Created virtualLight",H)}C= +x;F=E;B=C.shadowCascadeArray[F];B.position.copy(C.position);B.target.position.copy(C.target.position);B.lookAt(B.target);B.shadowCameraVisible=C.shadowCameraVisible;B.shadowDarkness=C.shadowDarkness;B.shadowBias=C.shadowCascadeBias[F];A=C.shadowCascadeNearZ[F];C=C.shadowCascadeFarZ[F];B=B.pointsFrustum;B[0].z=A;B[1].z=A;B[2].z=A;B[3].z=A;B[4].z=C;B[5].z=C;B[6].z=C;B[7].z=C;R[D]=H;D++}else R[D]=x,D++;u=0;for(K=R.length;uC;C++)F=B[C],F.copy(A[C]),F.unproject(E),F.applyMatrix4(D.matrixWorldInverse),F.xr.x&&(r.x=F.x),F.yr.y&&(r.y=F.y),F.zr.z&&(r.z=F.z);D.left=m.x;D.right=r.x;D.top=r.y;D.bottom=m.y;D.updateProjectionMatrix()}D=x.shadowMap;A=x.shadowMatrix;E=x.shadowCamera;E.position.setFromMatrixPosition(x.matrixWorld);t.setFromMatrixPosition(x.target.matrixWorld);E.lookAt(t);E.updateMatrixWorld();E.matrixWorldInverse.getInverse(E.matrixWorld);x.cameraHelper&& +(x.cameraHelper.visible=x.shadowCameraVisible);x.shadowCameraVisible&&x.cameraHelper.update();A.set(.5,0,0,.5,0,.5,0,.5,0,0,.5,.5,0,0,0,1);A.multiply(E.projectionMatrix);A.multiply(E.matrixWorldInverse);q.multiplyMatrices(E.projectionMatrix,E.matrixWorldInverse);p.setFromMatrix(q);a.setRenderTarget(D);a.clear();s.length=0;e(c,c,E);x=0;for(D=s.length;x 0 ) {\nfloat depth = gl_FragCoord.z / gl_FragCoord.w;\nfloat fogFactor = 0.0;\nif ( fogType == 1 ) {\nfogFactor = smoothstep( fogNear, fogFar, depth );\n} else {\nconst float LOG2 = 1.442695;\nfloat fogFactor = exp2( - fogDensity * fogDensity * depth * depth * LOG2 );\nfogFactor = 1.0 - clamp( fogFactor, 0.0, 1.0 );\n}\ngl_FragColor = mix( gl_FragColor, vec4( fogColor, gl_FragColor.w ), 
fogFactor );\n}\n}"].join("\n")); +w.compileShader(R);w.compileShader(H);w.attachShader(F,R);w.attachShader(F,H);w.linkProgram(F);D=F;v=w.getAttribLocation(D,"position");y=w.getAttribLocation(D,"uv");c=w.getUniformLocation(D,"uvOffset");d=w.getUniformLocation(D,"uvScale");e=w.getUniformLocation(D,"rotation");f=w.getUniformLocation(D,"scale");g=w.getUniformLocation(D,"color");h=w.getUniformLocation(D,"map");k=w.getUniformLocation(D,"opacity");n=w.getUniformLocation(D,"modelViewMatrix");p=w.getUniformLocation(D,"projectionMatrix");q= +w.getUniformLocation(D,"fogType");m=w.getUniformLocation(D,"fogDensity");r=w.getUniformLocation(D,"fogNear");t=w.getUniformLocation(D,"fogFar");s=w.getUniformLocation(D,"fogColor");u=w.getUniformLocation(D,"alphaTest");F=document.createElement("canvas");F.width=8;F.height=8;R=F.getContext("2d");R.fillStyle="white";R.fillRect(0,0,8,8);E=new THREE.Texture(F);E.needsUpdate=!0}w.useProgram(D);w.enableVertexAttribArray(v);w.enableVertexAttribArray(y);w.disable(w.CULL_FACE);w.enable(w.BLEND);w.bindBuffer(w.ARRAY_BUFFER, +K);w.vertexAttribPointer(v,2,w.FLOAT,!1,16,0);w.vertexAttribPointer(y,2,w.FLOAT,!1,16,8);w.bindBuffer(w.ELEMENT_ARRAY_BUFFER,x);w.uniformMatrix4fv(p,!1,B.projectionMatrix.elements);w.activeTexture(w.TEXTURE0);w.uniform1i(h,0);R=F=0;(H=A.fog)?(w.uniform3f(s,H.color.r,H.color.g,H.color.b),H instanceof THREE.Fog?(w.uniform1f(r,H.near),w.uniform1f(t,H.far),w.uniform1i(q,1),R=F=1):H instanceof THREE.FogExp2&&(w.uniform1f(m,H.density),w.uniform1i(q,2),R=F=2)):(w.uniform1i(q,0),R=F=0);for(var H=0,C=b.length;H< +C;H++){var T=b[H];T._modelViewMatrix.multiplyMatrices(B.matrixWorldInverse,T.matrixWorld);T.z=null===T.renderDepth?-T._modelViewMatrix.elements[14]:T.renderDepth}b.sort(G);for(var 
Q=[],H=0,C=b.length;Hq-1?0:q-1,r=q+1>e-1?e-1:q+1,t=0>p-1?0:p-1,s=p+1>d-1?d-1:p+1,u=[],v=[0,0,h[4*(q*d+p)]/255*b];u.push([-1,0,h[4*(q*d+t)]/255*b]);u.push([-1,-1,h[4*(m*d+t)]/255*b]);u.push([0,-1,h[4*(m*d+p)]/255*b]);u.push([1,-1,h[4*(m*d+s)]/255*b]);u.push([1,0,h[4*(q*d+s)]/255*b]);u.push([1,1,h[4*(r*d+s)]/255*b]);u.push([0,1,h[4*(r*d+p)]/255* +b]);u.push([-1,1,h[4*(r*d+t)]/255*b]);m=[];t=u.length;for(r=0;re)return null;var f=[],g=[],h=[],k,n,p;if(0=q--){console.log("Warning, unable to triangulate polygon!");break}k=n;e<=k&&(k=0);n=k+1;e<=n&&(n=0);p=n+1;e<=p&&(p=0);var m;a:{var r=m=void 0,t=void 0,s=void 0,u=void 0,v=void 0,y=void 0,G=void 0,w=void 0, +r=a[g[k]].x,t=a[g[k]].y,s=a[g[n]].x,u=a[g[n]].y,v=a[g[p]].x,y=a[g[p]].y;if(1E-10>(s-r)*(y-t)-(u-t)*(v-r))m=!1;else{var K=void 0,x=void 0,D=void 0,E=void 0,A=void 0,B=void 0,F=void 0,R=void 0,H=void 0,C=void 0,H=R=F=w=G=void 0,K=v-s,x=y-u,D=r-v,E=t-y,A=s-r,B=u-t;for(m=0;mk)g=d+1;else if(0b&&(b=0);1=b)return b=c[a]-b,a=this.curves[a],b=1-b/a.getLength(),a.getPointAt(b);a++}return null};THREE.CurvePath.prototype.getLength=function(){var a=this.getCurveLengths();return a[a.length-1]}; +THREE.CurvePath.prototype.getCurveLengths=function(){if(this.cacheLengths&&this.cacheLengths.length==this.curves.length)return this.cacheLengths;var a=[],b=0,c,d=this.curves.length;for(c=0;cb?b=h.x:h.xc?c=h.y:h.yd?d=h.z:h.zMath.abs(d.x-c[0].x)&&1E-10>Math.abs(d.y-c[0].y)&&c.splice(c.length-1,1);b&&c.push(c[0]);return c}; +THREE.Path.prototype.toShapes=function(a,b){function c(a){for(var b=[],c=0,d=a.length;cm&&(g=b[f],k=-k,h=b[e],m=-m),!(a.yh.y))if(a.y==g.y){if(a.x==g.x)return!0}else{e=m*(a.x-g.x)-k*(a.y-g.y);if(0==e)return!0;0>e||(d=!d)}}else if(a.y==g.y&&(h.x<=a.x&&a.x<=g.x||g.x<=a.x&&a.x<= +h.x))return!0}return d}var e=function(a){var b,c,d,e,f=[],g=new THREE.Path;b=0;for(c=a.length;bE||E>D)return[];k=n*p-k*q;if(0>k||k>D)return[]}else{if(0d?[]:k==d?f?[]:[g]:a<=d?[g,h]: +[g,n]}function e(a,b,c,d){var 
e=b.x-a.x,f=b.y-a.y;b=c.x-a.x;c=c.y-a.y;var g=d.x-a.x;d=d.y-a.y;a=e*c-f*b;e=e*d-f*g;return 1E-10f&&(f=d);var g=a+1;g>d&&(g=0);d=e(h[a],h[f],h[g],k[b]);if(!d)return!1; +d=k.length-1;f=b-1;0>f&&(f=d);g=b+1;g>d&&(g=0);return(d=e(k[b],k[f],k[g],h[a]))?!0:!1}function f(a,b){var c,e;for(c=0;cC){console.log("Infinite Loop! Holes left:"+ +n.length+", Probably Hole outside Shape!");break}for(q=B;qh;h++)n=k[h].x+":"+k[h].y, +n=p[n],void 0!==n&&(k[h]=n);return q.concat()},isClockWise:function(a){return 0>THREE.FontUtils.Triangulate.area(a)},b2p0:function(a,b){var c=1-a;return c*c*b},b2p1:function(a,b){return 2*(1-a)*a*b},b2p2:function(a,b){return a*a*b},b2:function(a,b,c,d){return this.b2p0(a,b)+this.b2p1(a,c)+this.b2p2(a,d)},b3p0:function(a,b){var c=1-a;return c*c*c*b},b3p1:function(a,b){var c=1-a;return 3*c*c*a*b},b3p2:function(a,b){return 3*(1-a)*a*a*b},b3p3:function(a,b){return a*a*a*b},b3:function(a,b,c,d,e){return this.b3p0(a, +b)+this.b3p1(a,c)+this.b3p2(a,d)+this.b3p3(a,e)}};THREE.LineCurve=function(a,b){this.v1=a;this.v2=b};THREE.LineCurve.prototype=Object.create(THREE.Curve.prototype);THREE.LineCurve.prototype.getPoint=function(a){var b=this.v2.clone().sub(this.v1);b.multiplyScalar(a).add(this.v1);return b};THREE.LineCurve.prototype.getPointAt=function(a){return this.getPoint(a)};THREE.LineCurve.prototype.getTangent=function(a){return this.v2.clone().sub(this.v1).normalize()}; +THREE.QuadraticBezierCurve=function(a,b,c){this.v0=a;this.v1=b;this.v2=c};THREE.QuadraticBezierCurve.prototype=Object.create(THREE.Curve.prototype);THREE.QuadraticBezierCurve.prototype.getPoint=function(a){var b=new THREE.Vector2;b.x=THREE.Shape.Utils.b2(a,this.v0.x,this.v1.x,this.v2.x);b.y=THREE.Shape.Utils.b2(a,this.v0.y,this.v1.y,this.v2.y);return b}; +THREE.QuadraticBezierCurve.prototype.getTangent=function(a){var b=new 
THREE.Vector2;b.x=THREE.Curve.Utils.tangentQuadraticBezier(a,this.v0.x,this.v1.x,this.v2.x);b.y=THREE.Curve.Utils.tangentQuadraticBezier(a,this.v0.y,this.v1.y,this.v2.y);return b.normalize()};THREE.CubicBezierCurve=function(a,b,c,d){this.v0=a;this.v1=b;this.v2=c;this.v3=d};THREE.CubicBezierCurve.prototype=Object.create(THREE.Curve.prototype); +THREE.CubicBezierCurve.prototype.getPoint=function(a){var b;b=THREE.Shape.Utils.b3(a,this.v0.x,this.v1.x,this.v2.x,this.v3.x);a=THREE.Shape.Utils.b3(a,this.v0.y,this.v1.y,this.v2.y,this.v3.y);return new THREE.Vector2(b,a)};THREE.CubicBezierCurve.prototype.getTangent=function(a){var b;b=THREE.Curve.Utils.tangentCubicBezier(a,this.v0.x,this.v1.x,this.v2.x,this.v3.x);a=THREE.Curve.Utils.tangentCubicBezier(a,this.v0.y,this.v1.y,this.v2.y,this.v3.y);b=new THREE.Vector2(b,a);b.normalize();return b}; +THREE.SplineCurve=function(a){this.points=void 0==a?[]:a};THREE.SplineCurve.prototype=Object.create(THREE.Curve.prototype);THREE.SplineCurve.prototype.getPoint=function(a){var b=this.points;a*=b.length-1;var c=Math.floor(a);a-=c;var d=b[0==c?c:c-1],e=b[c],f=b[c>b.length-2?b.length-1:c+1],b=b[c>b.length-3?b.length-1:c+2],c=new THREE.Vector2;c.x=THREE.Curve.Utils.interpolate(d.x,e.x,f.x,b.x,a);c.y=THREE.Curve.Utils.interpolate(d.y,e.y,f.y,b.y,a);return c}; +THREE.EllipseCurve=function(a,b,c,d,e,f,g){this.aX=a;this.aY=b;this.xRadius=c;this.yRadius=d;this.aStartAngle=e;this.aEndAngle=f;this.aClockwise=g};THREE.EllipseCurve.prototype=Object.create(THREE.Curve.prototype); +THREE.EllipseCurve.prototype.getPoint=function(a){var b=this.aEndAngle-this.aStartAngle;0>b&&(b+=2*Math.PI);b>2*Math.PI&&(b-=2*Math.PI);a=!0===this.aClockwise?this.aEndAngle+(1-a)*(2*Math.PI-b):this.aStartAngle+a*b;b=new THREE.Vector2;b.x=this.aX+this.xRadius*Math.cos(a);b.y=this.aY+this.yRadius*Math.sin(a);return b};THREE.ArcCurve=function(a,b,c,d,e,f){THREE.EllipseCurve.call(this,a,b,c,c,d,e,f)};THREE.ArcCurve.prototype=Object.create(THREE.EllipseCurve.prototype); 
+THREE.LineCurve3=THREE.Curve.create(function(a,b){this.v1=a;this.v2=b},function(a){var b=new THREE.Vector3;b.subVectors(this.v2,this.v1);b.multiplyScalar(a);b.add(this.v1);return b});THREE.QuadraticBezierCurve3=THREE.Curve.create(function(a,b,c){this.v0=a;this.v1=b;this.v2=c},function(a){var b=new THREE.Vector3;b.x=THREE.Shape.Utils.b2(a,this.v0.x,this.v1.x,this.v2.x);b.y=THREE.Shape.Utils.b2(a,this.v0.y,this.v1.y,this.v2.y);b.z=THREE.Shape.Utils.b2(a,this.v0.z,this.v1.z,this.v2.z);return b}); +THREE.CubicBezierCurve3=THREE.Curve.create(function(a,b,c,d){this.v0=a;this.v1=b;this.v2=c;this.v3=d},function(a){var b=new THREE.Vector3;b.x=THREE.Shape.Utils.b3(a,this.v0.x,this.v1.x,this.v2.x,this.v3.x);b.y=THREE.Shape.Utils.b3(a,this.v0.y,this.v1.y,this.v2.y,this.v3.y);b.z=THREE.Shape.Utils.b3(a,this.v0.z,this.v1.z,this.v2.z,this.v3.z);return b}); +THREE.SplineCurve3=THREE.Curve.create(function(a){this.points=void 0==a?[]:a},function(a){var b=this.points;a*=b.length-1;var c=Math.floor(a);a-=c;var d=b[0==c?c:c-1],e=b[c],f=b[c>b.length-2?b.length-1:c+1],b=b[c>b.length-3?b.length-1:c+2],c=new THREE.Vector3;c.x=THREE.Curve.Utils.interpolate(d.x,e.x,f.x,b.x,a);c.y=THREE.Curve.Utils.interpolate(d.y,e.y,f.y,b.y,a);c.z=THREE.Curve.Utils.interpolate(d.z,e.z,f.z,b.z,a);return c}); +THREE.ClosedSplineCurve3=THREE.Curve.create(function(a){this.points=void 0==a?[]:a},function(a){var b=this.points;a*=b.length-0;var c=Math.floor(a);a-=c;var c=c+(0a.hierarchy[b].keys[c].time&&(a.hierarchy[b].keys[c].time= +0),void 0!==a.hierarchy[b].keys[c].rot&&!(a.hierarchy[b].keys[c].rot instanceof THREE.Quaternion)){var d=a.hierarchy[b].keys[c].rot;a.hierarchy[b].keys[c].rot=(new THREE.Quaternion).fromArray(d)}if(a.hierarchy[b].keys.length&&void 0!==a.hierarchy[b].keys[0].morphTargets){d={};for(c=0;cd;d++){for(var e=this.keyTypes[d],f=this.data.hierarchy[a].keys[0],g=this.getNextKeyWith(e,a,1);g.timef.index;)f=g,g=this.getNextKeyWith(e,a,g.index+1);c.prevKey[e]=f;c.nextKey[e]=g}}}; 
+THREE.Animation.prototype.resetBlendWeights=function(){for(var a=0,b=this.hierarchy.length;aa.length-2?q:q+1;c[3]=q>a.length-3?q:q+2;q=a[c[0]];r=a[c[1]];t=a[c[2]];s=a[c[3]];c=e*e;m=e*c;d[0]=f(q[0],r[0],t[0],s[0],e,c,m);d[1]=f(q[1],r[1],t[1],s[1],e,c,m);d[2]=f(q[2],r[2],t[2],s[2],e,c,m);return d},f=function(a,b,c,d,e,f,m){a=.5*(c-a);d=.5*(d-b);return(2*(b-c)+a+d)*m+ +(-3*(b-c)-2*a-d)*f+a*e+b};return function(f){if(!1!==this.isPlaying&&(this.currentTime+=f*this.timeScale,0!==this.weight)){f=this.data.length;if(this.currentTime>f||0>this.currentTime)if(this.loop)this.currentTime%=f,0>this.currentTime&&(this.currentTime+=f),this.reset();else{this.stop();return}f=0;for(var h=this.hierarchy.length;fq;q++){var m=this.keyTypes[q],r=n.prevKey[m],t=n.nextKey[m]; +if(0this.timeScale&&r.time>=this.currentTime){r=this.data.hierarchy[f].keys[0];for(t=this.getNextKeyWith(m,f,1);t.timer.index;)r=t,t=this.getNextKeyWith(m,f,t.index+1);n.prevKey[m]=r;n.nextKey[m]=t}k.matrixAutoUpdate=!0;k.matrixWorldNeedsUpdate=!0;var s=(this.currentTime-r.time)/(t.time-r.time),u=r[m],v=t[m];0>s&&(s=0);1a&&(this.currentTime%=a);this.currentTime=Math.min(this.currentTime,a);a=0;for(var b=this.hierarchy.length;af.index;)f=g,g=e[f.index+1];d.prevKey= +f;d.nextKey=g}g.time>=this.currentTime?f.interpolate(g,this.currentTime):f.interpolate(g,g.time);this.data.hierarchy[a].node.updateMatrix();c.matrixWorldNeedsUpdate=!0}}}};THREE.KeyFrameAnimation.prototype.getNextKeyWith=function(a,b,c){b=this.data.hierarchy[b].keys;for(c%=b.length;cthis.duration&&(this.currentTime%=this.duration);this.currentTime=Math.min(this.currentTime,this.duration);c=this.duration/this.frames;var d=Math.floor(this.currentTime/c);d!=b&&(this.mesh.morphTargetInfluences[a]=0,this.mesh.morphTargetInfluences[b]=1,this.mesh.morphTargetInfluences[d]= +0,a=b,b=d);this.mesh.morphTargetInfluences[d]=this.currentTime%c/c;this.mesh.morphTargetInfluences[a]=1-this.mesh.morphTargetInfluences[d]}}}()}; 
+THREE.BoxGeometry=function(a,b,c,d,e,f){function g(a,b,c,d,e,f,g,s){var u,v=h.widthSegments,y=h.heightSegments,G=e/2,w=f/2,K=h.vertices.length;if("x"===a&&"y"===b||"y"===a&&"x"===b)u="z";else if("x"===a&&"z"===b||"z"===a&&"x"===b)u="y",y=h.depthSegments;else if("z"===a&&"y"===b||"y"===a&&"z"===b)u="x",v=h.depthSegments;var x=v+1,D=y+1,E=e/v,A=f/y,B=new THREE.Vector3;B[u]=0=d)return new THREE.Vector2(c,a);d=Math.sqrt(d/2)}else a=!1,1E-10d?-1E-10>f&&(a=!0):Math.sign(e)== +Math.sign(g)&&(a=!0),a?(c=-e,a=d,d=Math.sqrt(h)):(c=d,a=e,d=Math.sqrt(h/2));return new THREE.Vector2(c/d,a/d)}function e(a,b){var c,d;for(P=a.length;0<=--P;){c=P;d=P-1;0>d&&(d=a.length-1);for(var e=0,f=r+2*p,e=0;eMath.abs(b.y-c.y)?[new THREE.Vector2(b.x,1-b.z),new THREE.Vector2(c.x,1-c.z),new THREE.Vector2(d.x,1-d.z),new THREE.Vector2(e.x,1-e.z)]:[new THREE.Vector2(b.y,1-b.z),new THREE.Vector2(c.y,1-c.z),new THREE.Vector2(d.y, +1-d.z),new THREE.Vector2(e.y,1-e.z)]}};THREE.ShapeGeometry=function(a,b){THREE.Geometry.call(this);this.type="ShapeGeometry";!1===a instanceof Array&&(a=[a]);this.addShapeList(a,b);this.computeFaceNormals()};THREE.ShapeGeometry.prototype=Object.create(THREE.Geometry.prototype);THREE.ShapeGeometry.prototype.addShapeList=function(a,b){for(var c=0,d=a.length;cc&&1===a.x&&(a=new THREE.Vector2(a.x-1,a.y));0===b.x&&0===b.z&&(a=new THREE.Vector2(c/2/Math.PI+.5, +a.y));return a.clone()}THREE.Geometry.call(this);this.type="PolyhedronGeometry";this.parameters={vertices:a,indices:b,radius:c,detail:d};c=c||1;d=d||0;for(var k=this,n=0,p=a.length;nr&&(.2>d&&(b[0].x+=1),.2>a&&(b[1].x+=1),.2>q&&(b[2].x+=1));n=0;for(p=this.vertices.length;nc.y?this.quaternion.set(1,0,0,0):(a.set(c.z,0,-c.x).normalize(),b=Math.acos(c.y),this.quaternion.setFromAxisAngle(a,b))}}(); +THREE.ArrowHelper.prototype.setLength=function(a,b,c){void 0===b&&(b=.2*a);void 
0===c&&(c=.2*b);this.line.scale.set(1,a,1);this.line.updateMatrix();this.cone.scale.set(c,b,c);this.cone.position.y=a;this.cone.updateMatrix()};THREE.ArrowHelper.prototype.setColor=function(a){this.line.material.color.set(a);this.cone.material.color.set(a)}; +THREE.BoxHelper=function(a){var b=new THREE.BufferGeometry;b.addAttribute("position",new THREE.BufferAttribute(new Float32Array(72),3));THREE.Line.call(this,b,new THREE.LineBasicMaterial({color:16776960}),THREE.LinePieces);void 0!==a&&this.update(a)};THREE.BoxHelper.prototype=Object.create(THREE.Line.prototype); +THREE.BoxHelper.prototype.update=function(a){var b=a.geometry;null===b.boundingBox&&b.computeBoundingBox();var c=b.boundingBox.min,b=b.boundingBox.max,d=this.geometry.attributes.position.array;d[0]=b.x;d[1]=b.y;d[2]=b.z;d[3]=c.x;d[4]=b.y;d[5]=b.z;d[6]=c.x;d[7]=b.y;d[8]=b.z;d[9]=c.x;d[10]=c.y;d[11]=b.z;d[12]=c.x;d[13]=c.y;d[14]=b.z;d[15]=b.x;d[16]=c.y;d[17]=b.z;d[18]=b.x;d[19]=c.y;d[20]=b.z;d[21]=b.x;d[22]=b.y;d[23]=b.z;d[24]=b.x;d[25]=b.y;d[26]=c.z;d[27]=c.x;d[28]=b.y;d[29]=c.z;d[30]=c.x;d[31]=b.y; +d[32]=c.z;d[33]=c.x;d[34]=c.y;d[35]=c.z;d[36]=c.x;d[37]=c.y;d[38]=c.z;d[39]=b.x;d[40]=c.y;d[41]=c.z;d[42]=b.x;d[43]=c.y;d[44]=c.z;d[45]=b.x;d[46]=b.y;d[47]=c.z;d[48]=b.x;d[49]=b.y;d[50]=b.z;d[51]=b.x;d[52]=b.y;d[53]=c.z;d[54]=c.x;d[55]=b.y;d[56]=b.z;d[57]=c.x;d[58]=b.y;d[59]=c.z;d[60]=c.x;d[61]=c.y;d[62]=b.z;d[63]=c.x;d[64]=c.y;d[65]=c.z;d[66]=b.x;d[67]=c.y;d[68]=b.z;d[69]=b.x;d[70]=c.y;d[71]=c.z;this.geometry.attributes.position.needsUpdate=!0;this.geometry.computeBoundingSphere();this.matrix=a.matrixWorld; +this.matrixAutoUpdate=!1};THREE.BoundingBoxHelper=function(a,b){var c=void 0!==b?b:8947848;this.object=a;this.box=new THREE.Box3;THREE.Mesh.call(this,new THREE.BoxGeometry(1,1,1),new 
THREE.MeshBasicMaterial({color:c,wireframe:!0}))};THREE.BoundingBoxHelper.prototype=Object.create(THREE.Mesh.prototype);THREE.BoundingBoxHelper.prototype.update=function(){this.box.setFromObject(this.object);this.box.size(this.scale);this.box.center(this.position)}; +THREE.CameraHelper=function(a){function b(a,b,d){c(a,d);c(b,d)}function c(a,b){d.vertices.push(new THREE.Vector3);d.colors.push(new THREE.Color(b));void 0===f[a]&&(f[a]=[]);f[a].push(d.vertices.length-1)}var d=new THREE.Geometry,e=new THREE.LineBasicMaterial({color:16777215,vertexColors:THREE.FaceColors}),f={};b("n1","n2",16755200);b("n2","n4",16755200);b("n4","n3",16755200);b("n3","n1",16755200);b("f1","f2",16755200);b("f2","f4",16755200);b("f4","f3",16755200);b("f3","f1",16755200);b("n1","f1",16755200); +b("n2","f2",16755200);b("n3","f3",16755200);b("n4","f4",16755200);b("p","n1",16711680);b("p","n2",16711680);b("p","n3",16711680);b("p","n4",16711680);b("u1","u2",43775);b("u2","u3",43775);b("u3","u1",43775);b("c","t",16777215);b("p","c",3355443);b("cn1","cn2",3355443);b("cn3","cn4",3355443);b("cf1","cf2",3355443);b("cf3","cf4",3355443);THREE.Line.call(this,d,e,THREE.LinePieces);this.camera=a;this.matrix=a.matrixWorld;this.matrixAutoUpdate=!1;this.pointMap=f;this.update()}; +THREE.CameraHelper.prototype=Object.create(THREE.Line.prototype); +THREE.CameraHelper.prototype.update=function(){var a,b,c=new THREE.Vector3,d=new THREE.Camera,e=function(e,g,h,k){c.set(g,h,k).unproject(d);e=b[e];if(void 0!==e)for(g=0,h=e.length;gt;t++){d[0]=r[g[t]];d[1]=r[g[(t+1)%3]];d.sort(f);var s=d.toString();void 0===e[s]?(e[s]={vert1:d[0],vert2:d[1],face1:q,face2:void 0},p++):e[s].face2=q}d=new Float32Array(6*p);f=0;for(s in e)if(g=e[s],void 0===g.face2|| +.9999>k[g.face1].normal.dot(k[g.face2].normal))p=n[g.vert1],d[f++]=p.x,d[f++]=p.y,d[f++]=p.z,p=n[g.vert2],d[f++]=p.x,d[f++]=p.y,d[f++]=p.z;h.addAttribute("position",new THREE.BufferAttribute(d,3));THREE.Line.call(this,h,new 
THREE.LineBasicMaterial({color:c}),THREE.LinePieces);this.matrix=a.matrixWorld;this.matrixAutoUpdate=!1};THREE.EdgesHelper.prototype=Object.create(THREE.Line.prototype); +THREE.FaceNormalsHelper=function(a,b,c,d){this.object=a;this.size=void 0!==b?b:1;a=void 0!==c?c:16776960;d=void 0!==d?d:1;b=new THREE.Geometry;c=0;for(var e=this.object.geometry.faces.length;cb;b++)a.faces[b].color=this.colors[4>b?0:1];b=new THREE.MeshBasicMaterial({vertexColors:THREE.FaceColors,wireframe:!0});this.lightSphere=new THREE.Mesh(a,b);this.add(this.lightSphere); +this.update()};THREE.HemisphereLightHelper.prototype=Object.create(THREE.Object3D.prototype);THREE.HemisphereLightHelper.prototype.dispose=function(){this.lightSphere.geometry.dispose();this.lightSphere.material.dispose()}; +THREE.HemisphereLightHelper.prototype.update=function(){var a=new THREE.Vector3;return function(){this.colors[0].copy(this.light.color).multiplyScalar(this.light.intensity);this.colors[1].copy(this.light.groundColor).multiplyScalar(this.light.intensity);this.lightSphere.lookAt(a.setFromMatrixPosition(this.light.matrixWorld).negate());this.lightSphere.geometry.colorsNeedUpdate=!0}}(); +THREE.PointLightHelper=function(a,b){this.light=a;this.light.updateMatrixWorld();var c=new THREE.SphereGeometry(b,4,2),d=new THREE.MeshBasicMaterial({wireframe:!0,fog:!1});d.color.copy(this.light.color).multiplyScalar(this.light.intensity);THREE.Mesh.call(this,c,d);this.matrix=this.light.matrixWorld;this.matrixAutoUpdate=!1};THREE.PointLightHelper.prototype=Object.create(THREE.Mesh.prototype);THREE.PointLightHelper.prototype.dispose=function(){this.geometry.dispose();this.material.dispose()}; +THREE.PointLightHelper.prototype.update=function(){this.material.color.copy(this.light.color).multiplyScalar(this.light.intensity)}; +THREE.SkeletonHelper=function(a){this.bones=this.getBoneList(a);for(var b=new THREE.Geometry,c=0;cs;s++){d[0]=t[g[s]];d[1]=t[g[(s+1)%3]];d.sort(f);var u=d.toString();void 
0===e[u]&&(q[2*p]=d[0],q[2*p+1]=d[1],e[u]=!0,p++)}d=new Float32Array(6*p);m=0;for(r=p;ms;s++)p= +k[q[2*m+s]],g=6*m+3*s,d[g+0]=p.x,d[g+1]=p.y,d[g+2]=p.z;h.addAttribute("position",new THREE.BufferAttribute(d,3))}else if(a.geometry instanceof THREE.BufferGeometry){if(void 0!==a.geometry.attributes.index){k=a.geometry.attributes.position.array;r=a.geometry.attributes.index.array;n=a.geometry.drawcalls;p=0;0===n.length&&(n=[{count:r.length,index:0,start:0}]);for(var q=new Uint32Array(2*r.length),t=0,v=n.length;ts;s++)d[0]= +g+r[m+s],d[1]=g+r[m+(s+1)%3],d.sort(f),u=d.toString(),void 0===e[u]&&(q[2*p]=d[0],q[2*p+1]=d[1],e[u]=!0,p++);d=new Float32Array(6*p);m=0;for(r=p;ms;s++)g=6*m+3*s,p=3*q[2*m+s],d[g+0]=k[p],d[g+1]=k[p+1],d[g+2]=k[p+2]}else for(k=a.geometry.attributes.position.array,p=k.length/3,q=p/3,d=new Float32Array(6*p),m=0,r=q;ms;s++)g=18*m+6*s,q=9*m+3*s,d[g+0]=k[q],d[g+1]=k[q+1],d[g+2]=k[q+2],p=9*m+(s+1)%3*3,d[g+3]=k[p],d[g+4]=k[p+1],d[g+5]=k[p+2];h.addAttribute("position",new THREE.BufferAttribute(d, +3))}THREE.Line.call(this,h,new THREE.LineBasicMaterial({color:c}),THREE.LinePieces);this.matrix=a.matrixWorld;this.matrixAutoUpdate=!1};THREE.WireframeHelper.prototype=Object.create(THREE.Line.prototype);THREE.ImmediateRenderObject=function(){THREE.Object3D.call(this);this.render=function(a){}};THREE.ImmediateRenderObject.prototype=Object.create(THREE.Object3D.prototype); +THREE.MorphBlendMesh=function(a,b){THREE.Mesh.call(this,a,b);this.animationsMap={};this.animationsList=[];var c=this.geometry.morphTargets.length;this.createAnimation("__default",0,c-1,c/1);this.setAnimationWeight("__default",1)};THREE.MorphBlendMesh.prototype=Object.create(THREE.Mesh.prototype); +THREE.MorphBlendMesh.prototype.createAnimation=function(a,b,c,d){b={startFrame:b,endFrame:c,length:c-b+1,fps:d,duration:(c-b)/d,lastFrame:0,currentFrame:0,active:!1,time:0,direction:1,weight:1,directionBackwards:!1,mirroredLoop:!1};this.animationsMap[a]=b;this.animationsList.push(b)}; 
+THREE.MorphBlendMesh.prototype.autoCreateAnimations=function(a){for(var b=/([a-z]+)_?(\d+)/,c,d={},e=this.geometry,f=0,g=e.morphTargets.length;fh.end&&(h.end=f);c||(c=k)}}for(k in d)h=d[k],this.createAnimation(k,h.start,h.end,a);this.firstAnimation=c}; +THREE.MorphBlendMesh.prototype.setAnimationDirectionForward=function(a){if(a=this.animationsMap[a])a.direction=1,a.directionBackwards=!1};THREE.MorphBlendMesh.prototype.setAnimationDirectionBackward=function(a){if(a=this.animationsMap[a])a.direction=-1,a.directionBackwards=!0};THREE.MorphBlendMesh.prototype.setAnimationFPS=function(a,b){var c=this.animationsMap[a];c&&(c.fps=b,c.duration=(c.end-c.start)/c.fps)}; +THREE.MorphBlendMesh.prototype.setAnimationDuration=function(a,b){var c=this.animationsMap[a];c&&(c.duration=b,c.fps=(c.end-c.start)/c.duration)};THREE.MorphBlendMesh.prototype.setAnimationWeight=function(a,b){var c=this.animationsMap[a];c&&(c.weight=b)};THREE.MorphBlendMesh.prototype.setAnimationTime=function(a,b){var c=this.animationsMap[a];c&&(c.time=b)};THREE.MorphBlendMesh.prototype.getAnimationTime=function(a){var b=0;if(a=this.animationsMap[a])b=a.time;return b}; +THREE.MorphBlendMesh.prototype.getAnimationDuration=function(a){var b=-1;if(a=this.animationsMap[a])b=a.duration;return b};THREE.MorphBlendMesh.prototype.playAnimation=function(a){var b=this.animationsMap[a];b?(b.time=0,b.active=!0):console.warn("animation["+a+"] undefined")};THREE.MorphBlendMesh.prototype.stopAnimation=function(a){if(a=this.animationsMap[a])a.active=!1}; +THREE.MorphBlendMesh.prototype.update=function(a){for(var b=0,c=this.animationsList.length;bd.duration||0>d.time)d.direction*=-1,d.time>d.duration&&(d.time=d.duration,d.directionBackwards=!0),0>d.time&&(d.time=0,d.directionBackwards=!1)}else d.time%=d.duration,0>d.time&&(d.time+=d.duration);var f=d.startFrame+THREE.Math.clamp(Math.floor(d.time/e),0,d.length-1),g=d.weight; 
+f!==d.currentFrame&&(this.morphTargetInfluences[d.lastFrame]=0,this.morphTargetInfluences[d.currentFrame]=1*g,this.morphTargetInfluences[f]=0,d.lastFrame=d.currentFrame,d.currentFrame=f);e=d.time%e/e;d.directionBackwards&&(e=1-e);this.morphTargetInfluences[d.currentFrame]=e*g;this.morphTargetInfluences[d.lastFrame]=(1-e)*g}}}; diff --git a/plugins/Sidebar/media-globe/world.jpg b/plugins/Sidebar/media-globe/world.jpg new file mode 100644 index 00000000..222bd939 Binary files /dev/null and b/plugins/Sidebar/media-globe/world.jpg differ diff --git a/plugins/Sidebar/media/Class.coffee b/plugins/Sidebar/media/Class.coffee new file mode 100644 index 00000000..d62ab25c --- /dev/null +++ b/plugins/Sidebar/media/Class.coffee @@ -0,0 +1,23 @@ +class Class + trace: true + + log: (args...) -> + return unless @trace + return if typeof console is 'undefined' + args.unshift("[#{@.constructor.name}]") + console.log(args...) + @ + + logStart: (name, args...) -> + return unless @trace + @logtimers or= {} + @logtimers[name] = +(new Date) + @log "#{name}", args..., "(started)" if args.length > 0 + @ + + logEnd: (name, args...) 
-> + ms = +(new Date)-@logtimers[name] + @log "#{name}", args..., "(Done in #{ms}ms)" + @ + +window.Class = Class \ No newline at end of file diff --git a/plugins/Sidebar/media/Scrollable.js b/plugins/Sidebar/media/Scrollable.js new file mode 100644 index 00000000..689a5719 --- /dev/null +++ b/plugins/Sidebar/media/Scrollable.js @@ -0,0 +1,91 @@ +/* via http://jsfiddle.net/elGrecode/00dgurnn/ */ + +window.initScrollable = function () { + + var scrollContainer = document.querySelector('.scrollable'), + scrollContentWrapper = document.querySelector('.scrollable .content-wrapper'), + scrollContent = document.querySelector('.scrollable .content'), + contentPosition = 0, + scrollerBeingDragged = false, + scroller, + topPosition, + scrollerHeight; + + function calculateScrollerHeight() { + // *Calculation of how tall scroller should be + var visibleRatio = scrollContainer.offsetHeight / scrollContentWrapper.scrollHeight; + if (visibleRatio == 1) + scroller.style.display = "none"; + else + scroller.style.display = "block"; + return visibleRatio * scrollContainer.offsetHeight; + } + + function moveScroller(evt) { + // Move Scroll bar to top offset + var scrollPercentage = evt.target.scrollTop / scrollContentWrapper.scrollHeight; + topPosition = scrollPercentage * (scrollContainer.offsetHeight - 5); // 5px arbitrary offset so scroll bar doesn't move too far beyond content wrapper bounding box + scroller.style.top = topPosition + 'px'; + } + + function startDrag(evt) { + normalizedPosition = evt.pageY; + contentPosition = scrollContentWrapper.scrollTop; + scrollerBeingDragged = true; + window.addEventListener('mousemove', scrollBarScroll); + return false; + } + + function stopDrag(evt) { + scrollerBeingDragged = false; + window.removeEventListener('mousemove', scrollBarScroll); + } + + function scrollBarScroll(evt) { + if (scrollerBeingDragged === true) { + evt.preventDefault(); + var mouseDifferential = evt.pageY - normalizedPosition; + var scrollEquivalent = 
mouseDifferential * (scrollContentWrapper.scrollHeight / scrollContainer.offsetHeight); + scrollContentWrapper.scrollTop = contentPosition + scrollEquivalent; + } + } + + function updateHeight() { + scrollerHeight = calculateScrollerHeight() - 10; + scroller.style.height = scrollerHeight + 'px'; + } + + function createScroller() { + // *Creates scroller element and appends to '.scrollable' div + // create scroller element + scroller = document.createElement("div"); + scroller.className = 'scroller'; + + // determine how big scroller should be based on content + scrollerHeight = calculateScrollerHeight() - 10; + + if (scrollerHeight / scrollContainer.offsetHeight < 1) { + // *If there is a need to have scroll bar based on content size + scroller.style.height = scrollerHeight + 'px'; + + // append scroller to scrollContainer div + scrollContainer.appendChild(scroller); + + // show scroll path divot + scrollContainer.className += ' showScroll'; + + // attach related draggable listeners + scroller.addEventListener('mousedown', startDrag); + window.addEventListener('mouseup', stopDrag); + } + + } + + createScroller(); + + + // *** Listeners *** + scrollContentWrapper.addEventListener('scroll', moveScroller); + + return updateHeight; +}; \ No newline at end of file diff --git a/plugins/Sidebar/media/Scrollbable.css b/plugins/Sidebar/media/Scrollbable.css new file mode 100644 index 00000000..b11faea0 --- /dev/null +++ b/plugins/Sidebar/media/Scrollbable.css @@ -0,0 +1,44 @@ +.scrollable { + overflow: hidden; +} + +.scrollable.showScroll::after { + position: absolute; + content: ''; + top: 5%; + right: 7px; + height: 90%; + width: 3px; + background: rgba(224, 224, 255, .3); +} + +.scrollable .content-wrapper { + width: 100%; + height: 100%; + padding-right: 50%; + overflow-y: scroll; +} +.scroller { + margin-top: 5px; + z-index: 5; + cursor: pointer; + position: absolute; + width: 7px; + border-radius: 5px; + background: #151515; + top: 0px; + left: 395px; + 
-webkit-transition: top .08s; + -moz-transition: top .08s; + -ms-transition: top .08s; + -o-transition: top .08s; + transition: top .08s; +} +.scroller { + -webkit-touch-callout: none; + -webkit-user-select: none; + -khtml-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + user-select: none; +} \ No newline at end of file diff --git a/plugins/Sidebar/media/Sidebar.coffee b/plugins/Sidebar/media/Sidebar.coffee new file mode 100644 index 00000000..6b7f20f8 --- /dev/null +++ b/plugins/Sidebar/media/Sidebar.coffee @@ -0,0 +1,326 @@ +class Sidebar extends Class + constructor: -> + @tag = null + @container = null + @opened = false + @width = 410 + @fixbutton = $(".fixbutton") + @fixbutton_addx = 0 + @fixbutton_initx = 0 + @fixbutton_targetx = 0 + @frame = $("#inner-iframe") + @initFixbutton() + @dragStarted = 0 + @globe = null + + @original_set_site_info = wrapper.setSiteInfo # We going to override this, save the original + + # Start in opened state for debugging + if false + @startDrag() + @moved() + @fixbutton_targetx = @fixbutton_initx - @width + @stopDrag() + + + initFixbutton: -> + # Detect dragging + @fixbutton.on "mousedown", (e) => + e.preventDefault() + + # Disable previous listeners + @fixbutton.off "click" + @fixbutton.off "mousemove" + + # Make sure its not a click + @dragStarted = (+ new Date) + @fixbutton.one "mousemove", (e) => + @fixbutton_addx = @fixbutton.offset().left-e.pageX + @startDrag() + @fixbutton.parent().on "click", (e) => + @stopDrag() + @fixbutton_initx = @fixbutton.offset().left # Initial x position + + + # Start dragging the fixbutton + startDrag: -> + @log "startDrag" + @fixbutton_targetx = @fixbutton_initx # Fallback x position + + @fixbutton.addClass("dragging") + + # Fullscreen drag bg to capture mouse events over iframe + $("
    ").appendTo(document.body) + + # IE position wrap fix + if navigator.userAgent.indexOf('MSIE') != -1 or navigator.appVersion.indexOf('Trident/') > 0 + @fixbutton.css("pointer-events", "none") + + # Don't go to homepage + @fixbutton.one "click", (e) => + @stopDrag() + @fixbutton.removeClass("dragging") + if Math.abs(@fixbutton.offset().left - @fixbutton_initx) > 5 + # If moved more than some pixel the button then don't go to homepage + e.preventDefault() + + # Animate drag + @fixbutton.parents().on "mousemove", @animDrag + @fixbutton.parents().on "mousemove" ,@waitMove + + # Stop dragging listener + @fixbutton.parents().on "mouseup", (e) => + e.preventDefault() + @stopDrag() + + + # Wait for moving the fixbutton + waitMove: (e) => + if Math.abs(@fixbutton.offset().left - @fixbutton_targetx) > 10 and (+ new Date)-@dragStarted > 100 + @moved() + @fixbutton.parents().off "mousemove" ,@waitMove + + moved: -> + @log "Moved" + @createHtmltag() + $(document.body).css("perspective", "1000px").addClass("body-sidebar") + $(window).off "resize" + $(window).on "resize", => + $(document.body).css "height", $(window).height() + @scrollable() + $(window).trigger "resize" + + # Override setsiteinfo to catch changes + wrapper.setSiteInfo = (site_info) => + @setSiteInfo(site_info) + @original_set_site_info.apply(wrapper, arguments) + + setSiteInfo: (site_info) -> + @updateHtmlTag() + @displayGlobe() + + + # Create the sidebar html tag + createHtmltag: -> + if not @container + @container = $(""" + + """) + @container.appendTo(document.body) + @tag = @container.find(".sidebar") + @updateHtmlTag() + @scrollable = window.initScrollable() + + + updateHtmlTag: -> + wrapper.ws.cmd "sidebarGetHtmlTag", {}, (res) => + if @tag.find(".content").children().length == 0 # First update + @log "Creating content" + morphdom(@tag.find(".content")[0], '
    '+res+'
    ') + @scrollable() + + else # Not first update, patch the html to keep unchanged dom elements + @log "Patching content" + morphdom @tag.find(".content")[0], '
    '+res+'
    ', { + onBeforeMorphEl: (from_el, to_el) -> # Ignore globe loaded state + if from_el.className == "globe" + return false + else + return true + } + + + animDrag: (e) => + mousex = e.pageX + + overdrag = @fixbutton_initx-@width-mousex + if overdrag > 0 # Overdragged + overdrag_percent = 1+overdrag/300 + mousex = (e.pageX + (@fixbutton_initx-@width)*overdrag_percent)/(1+overdrag_percent) + targetx = @fixbutton_initx-mousex-@fixbutton_addx + + @fixbutton.offset + left: mousex+@fixbutton_addx + + if @tag + @tag.css("transform", "translateX(#{0-targetx}px)") + + # Check if opened + if (not @opened and targetx > @width/3) or (@opened and targetx > @width*0.9) + @fixbutton_targetx = @fixbutton_initx - @width # Make it opened + else + @fixbutton_targetx = @fixbutton_initx + + + # Stop dragging the fixbutton + stopDrag: -> + @fixbutton.parents().off "mousemove" + @fixbutton.off "mousemove" + @fixbutton.css("pointer-events", "") + $(".drag-bg").remove() + if not @fixbutton.hasClass("dragging") + return + @fixbutton.removeClass("dragging") + + # Move back to initial position + if @fixbutton_targetx != @fixbutton.offset().left + # Animate fixbutton + @fixbutton.stop().animate {"left": @fixbutton_targetx}, 500, "easeOutBack", => + # Switch back to auto align + if @fixbutton_targetx == @fixbutton_initx # Closed + @fixbutton.css("left", "auto") + else # Opened + @fixbutton.css("left", @fixbutton_targetx) + + $(".fixbutton-bg").trigger "mouseout" # Switch fixbutton back to normal status + + # Animate sidebar and iframe + if @fixbutton_targetx == @fixbutton_initx + # Closed + targetx = 0 + @opened = false + else + # Opened + targetx = @width + if not @opened + @onOpened() + @opened = true + + # Revent sidebar transitions + @tag.css("transition", "0.4s ease-out") + @tag.css("transform", "translateX(-#{targetx}px)").one transitionEnd, => + @tag.css("transition", "") + if not @opened + @container.remove() + @container = null + @tag.remove() + @tag = null + + # Revert body 
transformations + @log "stopdrag", "opened:", @opened + if not @opened + @onClosed() + + + onOpened: -> + @log "Opened" + @scrollable() + + # Re-calculate height when site admin opened or closed + @tag.find("#checkbox-owned").off("click").on "click", => + setTimeout (=> + @scrollable() + ), 300 + + # Site limit button + @tag.find("#button-sitelimit").on "click", => + wrapper.ws.cmd "siteSetLimit", $("#input-sitelimit").val(), => + wrapper.notifications.add "done-sitelimit", "done", "Site storage limit modified!", 5000 + @updateHtmlTag() + return false + + # Owned checkbox + @tag.find("#checkbox-owned").on "click", => + wrapper.ws.cmd "siteSetOwned", [@tag.find("#checkbox-owned").is(":checked")] + + # Owned checkbox + @tag.find("#checkbox-autodownloadoptional").on "click", => + wrapper.ws.cmd "siteSetAutodownloadoptional", [@tag.find("#checkbox-autodownloadoptional").is(":checked")] + + # Change identity button + @tag.find("#button-identity").on "click", => + wrapper.ws.cmd "certSelect" + return false + + # Owned checkbox + @tag.find("#checkbox-owned").on "click", => + wrapper.ws.cmd "siteSetOwned", [@tag.find("#checkbox-owned").is(":checked")] + + # Save settings + @tag.find("#button-settings").on "click", => + wrapper.ws.cmd "fileGet", "content.json", (res) => + data = JSON.parse(res) + data["title"] = $("#settings-title").val() + data["description"] = $("#settings-description").val() + json_raw = unescape(encodeURIComponent(JSON.stringify(data, undefined, '\t'))) + wrapper.ws.cmd "fileWrite", ["content.json", btoa(json_raw)], (res) => + if res != "ok" # fileWrite failed + wrapper.notifications.add "file-write", "error", "File write error: #{res}" + else + wrapper.notifications.add "file-write", "done", "Site settings saved!", 5000 + @updateHtmlTag() + return false + + # Sign content.json + @tag.find("#button-sign").on "click", => + inner_path = @tag.find("#select-contents").val() + + if wrapper.site_info.privatekey + # Privatekey stored in users.json + 
wrapper.ws.cmd "siteSign", ["stored", inner_path], (res) => + wrapper.notifications.add "sign", "done", "#{inner_path} Signed!", 5000 + + else + # Ask the user for privatekey + wrapper.displayPrompt "Enter your private key:", "password", "Sign", (privatekey) => # Prompt the private key + wrapper.ws.cmd "siteSign", [privatekey, inner_path], (res) => + if res == "ok" + wrapper.notifications.add "sign", "done", "#{inner_path} Signed!", 5000 + + return false + + # Publish content.json + @tag.find("#button-publish").on "click", => + inner_path = @tag.find("#select-contents").val() + @tag.find("#button-publish").addClass "loading" + wrapper.ws.cmd "sitePublish", {"inner_path": inner_path, "sign": false}, => + @tag.find("#button-publish").removeClass "loading" + + @loadGlobe() + + + onClosed: -> + $(window).off "resize" + $(document.body).css("transition", "0.6s ease-in-out").removeClass("body-sidebar").on transitionEnd, (e) => + if e.target == document.body + $(document.body).css("height", "auto").css("perspective", "").css("transition", "").off transitionEnd + @unloadGlobe() + + # We dont need site info anymore + wrapper.setSiteInfo = @original_set_site_info + + + loadGlobe: => + if @tag.find(".globe").hasClass("loading") + setTimeout (=> + if typeof(DAT) == "undefined" # Globe script not loaded, do it first + $.getScript("/uimedia/globe/all.js", @displayGlobe) + else + @displayGlobe() + ), 600 + + + displayGlobe: => + wrapper.ws.cmd "sidebarGetPeers", [], (globe_data) => + if @globe + @globe.scene.remove(@globe.points) + @globe.addData( globe_data, {format: 'magnitude', name: "hello", animated: false} ) + @globe.createPoints() + else + @globe = new DAT.Globe( @tag.find(".globe")[0], {"imgDir": "/uimedia/globe/"} ) + @globe.addData( globe_data, {format: 'magnitude', name: "hello"} ) + @globe.createPoints() + @globe.animate() + @tag.find(".globe").removeClass("loading") + + + unloadGlobe: => + if not @globe + return false + @globe.unload() + @globe = null + + 
+window.sidebar = new Sidebar() +window.transitionEnd = 'transitionend webkitTransitionEnd oTransitionEnd otransitionend' diff --git a/plugins/Sidebar/media/Sidebar.css b/plugins/Sidebar/media/Sidebar.css new file mode 100644 index 00000000..bd7bced2 --- /dev/null +++ b/plugins/Sidebar/media/Sidebar.css @@ -0,0 +1,99 @@ +.drag-bg { width: 100%; height: 100%; position: absolute; } +.fixbutton.dragging { cursor: -webkit-grabbing; } +.fixbutton-bg:active { cursor: -webkit-grabbing; } + + +.body-sidebar { background-color: #666 !important; } +#inner-iframe { transition: 0.3s ease-in-out; transform-origin: left; backface-visibility: hidden; outline: 1px solid transparent } +.body-sidebar iframe { transform: rotateY(5deg); opacity: 0.8; pointer-events: none } /* translateX(-200px) scale(0.95)*/ + +/* SIDEBAR */ + +.sidebar-container { width: 100%; height: 100%; overflow: hidden; position: absolute; } +.sidebar { background-color: #212121; position: absolute; right: -1200px; height: 100%; width: 1200px; } /*box-shadow: inset 0px 0px 10px #000*/ +.sidebar .content { margin: 30px; font-family: "Segoe UI Light", "Segoe UI", "Helvetica Neue"; color: white; width: 375px; height: 300px; font-weight: 200 } +.sidebar h1, .sidebar h2 { font-weight: lighter; } +.sidebar .button { margin: 0px; display: inline-block; } + + +/* FIELDS */ + +.sidebar .fields { padding: 0px; list-style-type: none; width: 355px; } +.sidebar .fields > li, .sidebar .fields .settings-owned > li { margin-bottom: 30px } +.sidebar .fields > li:after, .sidebar .fields .settings-owned > li:after { clear: both; content: ''; display: block } +.sidebar .fields label { + font-family: Consolas, monospace; text-transform: uppercase; font-size: 13px; color: #ACACAC; display: inline-block; margin-bottom: 10px; + vertical-align: text-bottom; margin-right: 10px; +} +.sidebar .fields label small { font-weight: normal; color: white; text-transform: none; } +.sidebar .fields .text { background-color: black; border: 0px; 
padding: 10px; color: white; border-radius: 3px; width: 250px; font-family: Consolas, monospace; } +.sidebar .fields .text.long { width: 330px; font-size: 72%; } +.sidebar .fields .disabled { color: #AAA; background-color: #3B3B3B; } +.sidebar .fields .text-num { width: 30px; text-align: right; padding-right: 30px; } +.sidebar .fields .text-post { color: white; font-family: Consolas, monospace; display: inline-block; font-size: 13px; margin-left: -25px; width: 25px; } + +/* Select */ +.sidebar .fields select { + width: 225px; background-color: #3B3B3B; color: white; font-family: Consolas, monospace; appearance: none; + padding: 5px; padding-right: 25px; border: 0px; border-radius: 3px; height: 35px; vertical-align: 1px; box-shadow: 0px 1px 2px rgba(0,0,0,0.5); +} +.sidebar .fields .select-down { margin-left: -39px; width: 34px; display: inline-block; transform: rotateZ(90deg); height: 35px; vertical-align: -8px; pointer-events: none; font-weight: bold } + +/* Checkbox */ +.sidebar .fields .checkbox { width: 50px; height: 24px; position: relative; z-index: 999; opacity: 0; } +.sidebar .fields .checkbox-skin { background-color: #CCC; width: 50px; height: 24px; border-radius: 15px; transition: all 0.3s ease-in-out; display: inline-block; margin-left: -59px; } +.sidebar .fields .checkbox-skin:before { + content: ""; position: relative; width: 20px; background-color: white; height: 20px; display: block; border-radius: 100%; margin-top: 2px; margin-left: 2px; + transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86); +} +.sidebar .fields .checkbox:checked ~ .checkbox-skin:before { margin-left: 27px; } +.sidebar .fields .checkbox:checked ~ .checkbox-skin { background-color: #2ECC71; } + +/* Fake input */ +.sidebar .input { font-size: 13px; width: 250px; display: inline-block; overflow: hidden; text-overflow: ellipsis; vertical-align: top } + +/* GRAPH */ + +.graph { padding: 0px; list-style-type: none; width: 351px; background-color: black; height: 10px; 
border-radius: 8px; overflow: hidden; position: relative;} +.graph li { height: 100%; position: absolute; transition: all 0.3s; } +.graph-stacked li { position: static; float: left; } + +.graph-legend { padding: 0px; list-style-type: none; margin-top: 13px; font-family: Consolas, "Andale Mono", monospace; font-size: 13px; text-transform: capitalize; } +.sidebar .graph-legend li { margin: 0px; margin-top: 5px; margin-left: 0px; width: 160px; float: left; position: relative; } +.sidebar .graph-legend li:nth-child(odd) { margin-right: 29px } +.graph-legend span { position: absolute; } +.graph-legend b { text-align: right; display: inline-block; width: 50px; float: right; font-weight: normal; } +.graph-legend li:before { content: '\2022'; font-size: 23px; line-height: 0px; vertical-align: -3px; margin-right: 5px; } + +/* COLORS */ + +.back-green { background-color: #2ECC71 } +.color-green:before { color: #2ECC71 } +.back-blue { background-color: #3BAFDA } +.color-blue:before { color: #3BAFDA } +.back-darkblue { background-color: #2196F3 } +.color-darkblue:before { color: #2196F3 } +.back-purple { background-color: #B10DC9 } +.color-purple:before { color: #B10DC9 } +.back-yellow { background-color: #FFDC00 } +.color-yellow:before { color: #FFDC00 } +.back-orange { background-color: #FF9800 } +.color-orange:before { color: #FF9800 } +.back-gray { background-color: #ECF0F1 } +.color-gray:before { color: #ECF0F1 } +.back-black { background-color: #34495E } +.color-black:before { color: #34495E } +.back-white { background-color: #EEE } +.color-white:before { color: #EEE } + + +/* Settings owned */ + +.owned-title { float: left } +#checkbox-owned { margin-bottom: 25px; margin-top: 26px; margin-left: 11px; } +#checkbox-owned ~ .settings-owned { opacity: 0; max-height: 0px; transition: all 0.3s linear; overflow: hidden } +#checkbox-owned:checked ~ .settings-owned { opacity: 1; max-height: 400px } + +/* Globe */ +.globe { width: 360px; height: 360px } +.globe.loading { 
background: url(/uimedia/img/loading-circle.gif) center center no-repeat } \ No newline at end of file diff --git a/plugins/Sidebar/media/all.css b/plugins/Sidebar/media/all.css new file mode 100644 index 00000000..6496d130 --- /dev/null +++ b/plugins/Sidebar/media/all.css @@ -0,0 +1,153 @@ + + +/* ---- plugins/Sidebar/media/Scrollbable.css ---- */ + + +.scrollable { + overflow: hidden; +} + +.scrollable.showScroll::after { + position: absolute; + content: ''; + top: 5%; + right: 7px; + height: 90%; + width: 3px; + background: rgba(224, 224, 255, .3); +} + +.scrollable .content-wrapper { + width: 100%; + height: 100%; + padding-right: 50%; + overflow-y: scroll; +} +.scroller { + margin-top: 5px; + z-index: 5; + cursor: pointer; + position: absolute; + width: 7px; + -webkit-border-radius: 5px; -moz-border-radius: 5px; -o-border-radius: 5px; -ms-border-radius: 5px; border-radius: 5px ; + background: #151515; + top: 0px; + left: 395px; + -webkit-transition: top .08s; + -moz-transition: top .08s; + -ms-transition: top .08s; + -o-transition: top .08s; + -webkit-transition: top .08s; -moz-transition: top .08s; -o-transition: top .08s; -ms-transition: top .08s; transition: top .08s ; +} +.scroller { + -webkit-touch-callout: none; + -webkit-user-select: none; + -khtml-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + user-select: none; +} + + +/* ---- plugins/Sidebar/media/Sidebar.css ---- */ + + +.drag-bg { width: 100%; height: 100%; position: absolute; } +.fixbutton.dragging { cursor: -webkit-grabbing; } +.fixbutton-bg:active { cursor: -webkit-grabbing; } + + +.body-sidebar { background-color: #666 !important; } +#inner-iframe { -webkit-transition: 0.3s ease-in-out; -moz-transition: 0.3s ease-in-out; -o-transition: 0.3s ease-in-out; -ms-transition: 0.3s ease-in-out; transition: 0.3s ease-in-out ; transform-origin: left; -webkit-backface-visibility: hidden; -moz-backface-visibility: hidden; -o-backface-visibility: hidden; -ms-backface-visibility: 
hidden; backface-visibility: hidden ; outline: 1px solid transparent } +.body-sidebar iframe { -webkit-transform: rotateY(5deg); -moz-transform: rotateY(5deg); -o-transform: rotateY(5deg); -ms-transform: rotateY(5deg); transform: rotateY(5deg) ; opacity: 0.8; pointer-events: none } /* translateX(-200px) scale(0.95)*/ + +/* SIDEBAR */ + +.sidebar-container { width: 100%; height: 100%; overflow: hidden; position: absolute; } +.sidebar { background-color: #212121; position: absolute; right: -1200px; height: 100%; width: 1200px; } /*box-shadow: inset 0px 0px 10px #000*/ +.sidebar .content { margin: 30px; font-family: "Segoe UI Light", "Segoe UI", "Helvetica Neue"; color: white; width: 375px; height: 300px; font-weight: 200 } +.sidebar h1, .sidebar h2 { font-weight: lighter; } +.sidebar .button { margin: 0px; display: inline-block; } + + +/* FIELDS */ + +.sidebar .fields { padding: 0px; list-style-type: none; width: 355px; } +.sidebar .fields > li, .sidebar .fields .settings-owned > li { margin-bottom: 30px } +.sidebar .fields > li:after, .sidebar .fields .settings-owned > li:after { clear: both; content: ''; display: block } +.sidebar .fields label { + font-family: Consolas, monospace; text-transform: uppercase; font-size: 13px; color: #ACACAC; display: inline-block; margin-bottom: 10px; + vertical-align: text-bottom; margin-right: 10px; +} +.sidebar .fields label small { font-weight: normal; color: white; text-transform: none; } +.sidebar .fields .text { background-color: black; border: 0px; padding: 10px; color: white; -webkit-border-radius: 3px; -moz-border-radius: 3px; -o-border-radius: 3px; -ms-border-radius: 3px; border-radius: 3px ; width: 250px; font-family: Consolas, monospace; } +.sidebar .fields .text.long { width: 330px; font-size: 72%; } +.sidebar .fields .disabled { color: #AAA; background-color: #3B3B3B; } +.sidebar .fields .text-num { width: 30px; text-align: right; padding-right: 30px; } +.sidebar .fields .text-post { color: white; font-family: 
Consolas, monospace; display: inline-block; font-size: 13px; margin-left: -25px; width: 25px; } + +/* Select */ +.sidebar .fields select { + width: 225px; background-color: #3B3B3B; color: white; font-family: Consolas, monospace; -webkit-appearance: none; -moz-appearance: none; -o-appearance: none; -ms-appearance: none; appearance: none ; + padding: 5px; padding-right: 25px; border: 0px; -webkit-border-radius: 3px; -moz-border-radius: 3px; -o-border-radius: 3px; -ms-border-radius: 3px; border-radius: 3px ; height: 35px; vertical-align: 1px; -webkit-box-shadow: 0px 1px 2px rgba(0,0,0,0.5); -moz-box-shadow: 0px 1px 2px rgba(0,0,0,0.5); -o-box-shadow: 0px 1px 2px rgba(0,0,0,0.5); -ms-box-shadow: 0px 1px 2px rgba(0,0,0,0.5); box-shadow: 0px 1px 2px rgba(0,0,0,0.5) ; +} +.sidebar .fields .select-down { margin-left: -39px; width: 34px; display: inline-block; -webkit-transform: rotateZ(90deg); -moz-transform: rotateZ(90deg); -o-transform: rotateZ(90deg); -ms-transform: rotateZ(90deg); transform: rotateZ(90deg) ; height: 35px; vertical-align: -8px; pointer-events: none; font-weight: bold } + +/* Checkbox */ +.sidebar .fields .checkbox { width: 50px; height: 24px; position: relative; z-index: 999; opacity: 0; } +.sidebar .fields .checkbox-skin { background-color: #CCC; width: 50px; height: 24px; -webkit-border-radius: 15px; -moz-border-radius: 15px; -o-border-radius: 15px; -ms-border-radius: 15px; border-radius: 15px ; -webkit-transition: all 0.3s ease-in-out; -moz-transition: all 0.3s ease-in-out; -o-transition: all 0.3s ease-in-out; -ms-transition: all 0.3s ease-in-out; transition: all 0.3s ease-in-out ; display: inline-block; margin-left: -59px; } +.sidebar .fields .checkbox-skin:before { + content: ""; position: relative; width: 20px; background-color: white; height: 20px; display: block; -webkit-border-radius: 100%; -moz-border-radius: 100%; -o-border-radius: 100%; -ms-border-radius: 100%; border-radius: 100% ; margin-top: 2px; margin-left: 2px; + -webkit-transition: 
all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86); -moz-transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86); -o-transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86); -ms-transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86); transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86) ; +} +.sidebar .fields .checkbox:checked ~ .checkbox-skin:before { margin-left: 27px; } +.sidebar .fields .checkbox:checked ~ .checkbox-skin { background-color: #2ECC71; } + +/* Fake input */ +.sidebar .input { font-size: 13px; width: 250px; display: inline-block; overflow: hidden; text-overflow: ellipsis; vertical-align: top } + +/* GRAPH */ + +.graph { padding: 0px; list-style-type: none; width: 351px; background-color: black; height: 10px; -webkit-border-radius: 8px; -moz-border-radius: 8px; -o-border-radius: 8px; -ms-border-radius: 8px; border-radius: 8px ; overflow: hidden; position: relative;} +.graph li { height: 100%; position: absolute; -webkit-transition: all 0.3s; -moz-transition: all 0.3s; -o-transition: all 0.3s; -ms-transition: all 0.3s; transition: all 0.3s ; } +.graph-stacked li { position: static; float: left; } + +.graph-legend { padding: 0px; list-style-type: none; margin-top: 13px; font-family: Consolas, "Andale Mono", monospace; font-size: 13px; text-transform: capitalize; } +.sidebar .graph-legend li { margin: 0px; margin-top: 5px; margin-left: 0px; width: 160px; float: left; position: relative; } +.sidebar .graph-legend li:nth-child(odd) { margin-right: 29px } +.graph-legend span { position: absolute; } +.graph-legend b { text-align: right; display: inline-block; width: 50px; float: right; font-weight: normal; } +.graph-legend li:before { content: '\2022'; font-size: 23px; line-height: 0px; vertical-align: -3px; margin-right: 5px; } + +/* COLORS */ + +.back-green { background-color: #2ECC71 } +.color-green:before { color: #2ECC71 } +.back-blue { background-color: #3BAFDA } +.color-blue:before { color: #3BAFDA } +.back-darkblue { 
background-color: #2196F3 } +.color-darkblue:before { color: #2196F3 } +.back-purple { background-color: #B10DC9 } +.color-purple:before { color: #B10DC9 } +.back-yellow { background-color: #FFDC00 } +.color-yellow:before { color: #FFDC00 } +.back-orange { background-color: #FF9800 } +.color-orange:before { color: #FF9800 } +.back-gray { background-color: #ECF0F1 } +.color-gray:before { color: #ECF0F1 } +.back-black { background-color: #34495E } +.color-black:before { color: #34495E } +.back-white { background-color: #EEE } +.color-white:before { color: #EEE } + + +/* Settings owned */ + +.owned-title { float: left } +#checkbox-owned { margin-bottom: 25px; margin-top: 26px; margin-left: 11px; } +#checkbox-owned ~ .settings-owned { opacity: 0; max-height: 0px; -webkit-transition: all 0.3s linear; -moz-transition: all 0.3s linear; -o-transition: all 0.3s linear; -ms-transition: all 0.3s linear; transition: all 0.3s linear ; overflow: hidden } +#checkbox-owned:checked ~ .settings-owned { opacity: 1; max-height: 400px } + +/* Globe */ +.globe { width: 360px; height: 360px } +.globe.loading { background: url(/uimedia/img/loading-circle.gif) center center no-repeat } \ No newline at end of file diff --git a/plugins/Sidebar/media/all.js b/plugins/Sidebar/media/all.js new file mode 100644 index 00000000..ecc738fb --- /dev/null +++ b/plugins/Sidebar/media/all.js @@ -0,0 +1,893 @@ + + +/* ---- plugins/Sidebar/media/Class.coffee ---- */ + + +(function() { + var Class, + __slice = [].slice; + + Class = (function() { + function Class() {} + + Class.prototype.trace = true; + + Class.prototype.log = function() { + var args; + args = 1 <= arguments.length ? 
__slice.call(arguments, 0) : []; + if (!this.trace) { + return; + } + if (typeof console === 'undefined') { + return; + } + args.unshift("[" + this.constructor.name + "]"); + console.log.apply(console, args); + return this; + }; + + Class.prototype.logStart = function() { + var args, name; + name = arguments[0], args = 2 <= arguments.length ? __slice.call(arguments, 1) : []; + if (!this.trace) { + return; + } + this.logtimers || (this.logtimers = {}); + this.logtimers[name] = +(new Date); + if (args.length > 0) { + this.log.apply(this, ["" + name].concat(__slice.call(args), ["(started)"])); + } + return this; + }; + + Class.prototype.logEnd = function() { + var args, ms, name; + name = arguments[0], args = 2 <= arguments.length ? __slice.call(arguments, 1) : []; + ms = +(new Date) - this.logtimers[name]; + this.log.apply(this, ["" + name].concat(__slice.call(args), ["(Done in " + ms + "ms)"])); + return this; + }; + + return Class; + + })(); + + window.Class = Class; + +}).call(this); + + +/* ---- plugins/Sidebar/media/Scrollable.js ---- */ + + +/* via http://jsfiddle.net/elGrecode/00dgurnn/ */ + +window.initScrollable = function () { + + var scrollContainer = document.querySelector('.scrollable'), + scrollContentWrapper = document.querySelector('.scrollable .content-wrapper'), + scrollContent = document.querySelector('.scrollable .content'), + contentPosition = 0, + scrollerBeingDragged = false, + scroller, + topPosition, + scrollerHeight; + + function calculateScrollerHeight() { + // *Calculation of how tall scroller should be + var visibleRatio = scrollContainer.offsetHeight / scrollContentWrapper.scrollHeight; + if (visibleRatio == 1) + scroller.style.display = "none"; + else + scroller.style.display = "block"; + return visibleRatio * scrollContainer.offsetHeight; + } + + function moveScroller(evt) { + // Move Scroll bar to top offset + var scrollPercentage = evt.target.scrollTop / scrollContentWrapper.scrollHeight; + topPosition = scrollPercentage * 
(scrollContainer.offsetHeight - 5); // 5px arbitrary offset so scroll bar doesn't move too far beyond content wrapper bounding box + scroller.style.top = topPosition + 'px'; + } + + function startDrag(evt) { + normalizedPosition = evt.pageY; + contentPosition = scrollContentWrapper.scrollTop; + scrollerBeingDragged = true; + window.addEventListener('mousemove', scrollBarScroll); + return false; + } + + function stopDrag(evt) { + scrollerBeingDragged = false; + window.removeEventListener('mousemove', scrollBarScroll); + } + + function scrollBarScroll(evt) { + if (scrollerBeingDragged === true) { + evt.preventDefault(); + var mouseDifferential = evt.pageY - normalizedPosition; + var scrollEquivalent = mouseDifferential * (scrollContentWrapper.scrollHeight / scrollContainer.offsetHeight); + scrollContentWrapper.scrollTop = contentPosition + scrollEquivalent; + } + } + + function updateHeight() { + scrollerHeight = calculateScrollerHeight() - 10; + scroller.style.height = scrollerHeight + 'px'; + } + + function createScroller() { + // *Creates scroller element and appends to '.scrollable' div + // create scroller element + scroller = document.createElement("div"); + scroller.className = 'scroller'; + + // determine how big scroller should be based on content + scrollerHeight = calculateScrollerHeight() - 10; + + if (scrollerHeight / scrollContainer.offsetHeight < 1) { + // *If there is a need to have scroll bar based on content size + scroller.style.height = scrollerHeight + 'px'; + + // append scroller to scrollContainer div + scrollContainer.appendChild(scroller); + + // show scroll path divot + scrollContainer.className += ' showScroll'; + + // attach related draggable listeners + scroller.addEventListener('mousedown', startDrag); + window.addEventListener('mouseup', stopDrag); + } + + } + + createScroller(); + + + // *** Listeners *** + scrollContentWrapper.addEventListener('scroll', moveScroller); + + return updateHeight; +}; + + +/* ---- 
plugins/Sidebar/media/Sidebar.coffee ---- */ + + +(function() { + var Sidebar, + __bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; }, + __extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + __hasProp = {}.hasOwnProperty; + + Sidebar = (function(_super) { + __extends(Sidebar, _super); + + function Sidebar() { + this.unloadGlobe = __bind(this.unloadGlobe, this); + this.displayGlobe = __bind(this.displayGlobe, this); + this.loadGlobe = __bind(this.loadGlobe, this); + this.animDrag = __bind(this.animDrag, this); + this.waitMove = __bind(this.waitMove, this); + this.tag = null; + this.container = null; + this.opened = false; + this.width = 410; + this.fixbutton = $(".fixbutton"); + this.fixbutton_addx = 0; + this.fixbutton_initx = 0; + this.fixbutton_targetx = 0; + this.frame = $("#inner-iframe"); + this.initFixbutton(); + this.dragStarted = 0; + this.globe = null; + this.original_set_site_info = wrapper.setSiteInfo; + if (false) { + this.startDrag(); + this.moved(); + this.fixbutton_targetx = this.fixbutton_initx - this.width; + this.stopDrag(); + } + } + + Sidebar.prototype.initFixbutton = function() { + this.fixbutton.on("mousedown", (function(_this) { + return function(e) { + e.preventDefault(); + _this.fixbutton.off("click"); + _this.fixbutton.off("mousemove"); + _this.dragStarted = +(new Date); + return _this.fixbutton.one("mousemove", function(e) { + _this.fixbutton_addx = _this.fixbutton.offset().left - e.pageX; + return _this.startDrag(); + }); + }; + })(this)); + this.fixbutton.parent().on("click", (function(_this) { + return function(e) { + return _this.stopDrag(); + }; + })(this)); + return this.fixbutton_initx = this.fixbutton.offset().left; + }; + + Sidebar.prototype.startDrag = function() { + 
this.log("startDrag"); + this.fixbutton_targetx = this.fixbutton_initx; + this.fixbutton.addClass("dragging"); + $("
    ").appendTo(document.body); + if (navigator.userAgent.indexOf('MSIE') !== -1 || navigator.appVersion.indexOf('Trident/') > 0) { + this.fixbutton.css("pointer-events", "none"); + } + this.fixbutton.one("click", (function(_this) { + return function(e) { + _this.stopDrag(); + _this.fixbutton.removeClass("dragging"); + if (Math.abs(_this.fixbutton.offset().left - _this.fixbutton_initx) > 5) { + return e.preventDefault(); + } + }; + })(this)); + this.fixbutton.parents().on("mousemove", this.animDrag); + this.fixbutton.parents().on("mousemove", this.waitMove); + return this.fixbutton.parents().on("mouseup", (function(_this) { + return function(e) { + e.preventDefault(); + return _this.stopDrag(); + }; + })(this)); + }; + + Sidebar.prototype.waitMove = function(e) { + if (Math.abs(this.fixbutton.offset().left - this.fixbutton_targetx) > 10 && (+(new Date)) - this.dragStarted > 100) { + this.moved(); + return this.fixbutton.parents().off("mousemove", this.waitMove); + } + }; + + Sidebar.prototype.moved = function() { + this.log("Moved"); + this.createHtmltag(); + $(document.body).css("perspective", "1000px").addClass("body-sidebar"); + $(window).off("resize"); + $(window).on("resize", (function(_this) { + return function() { + $(document.body).css("height", $(window).height()); + return _this.scrollable(); + }; + })(this)); + $(window).trigger("resize"); + return wrapper.setSiteInfo = (function(_this) { + return function(site_info) { + _this.setSiteInfo(site_info); + return _this.original_set_site_info.apply(wrapper, arguments); + }; + })(this); + }; + + Sidebar.prototype.setSiteInfo = function(site_info) { + this.updateHtmlTag(); + return this.displayGlobe(); + }; + + Sidebar.prototype.createHtmltag = function() { + if (!this.container) { + this.container = $("
    \n
    "); + this.container.appendTo(document.body); + this.tag = this.container.find(".sidebar"); + this.updateHtmlTag(); + return this.scrollable = window.initScrollable(); + } + }; + + Sidebar.prototype.updateHtmlTag = function() { + return wrapper.ws.cmd("sidebarGetHtmlTag", {}, (function(_this) { + return function(res) { + if (_this.tag.find(".content").children().length === 0) { + _this.log("Creating content"); + morphdom(_this.tag.find(".content")[0], '
    ' + res + '
    '); + return _this.scrollable(); + } else { + _this.log("Patching content"); + return morphdom(_this.tag.find(".content")[0], '
    ' + res + '
    ', { + onBeforeMorphEl: function(from_el, to_el) { + if (from_el.className === "globe") { + return false; + } else { + return true; + } + } + }); + } + }; + })(this)); + }; + + Sidebar.prototype.animDrag = function(e) { + var mousex, overdrag, overdrag_percent, targetx; + mousex = e.pageX; + overdrag = this.fixbutton_initx - this.width - mousex; + if (overdrag > 0) { + overdrag_percent = 1 + overdrag / 300; + mousex = (e.pageX + (this.fixbutton_initx - this.width) * overdrag_percent) / (1 + overdrag_percent); + } + targetx = this.fixbutton_initx - mousex - this.fixbutton_addx; + this.fixbutton.offset({ + left: mousex + this.fixbutton_addx + }); + if (this.tag) { + this.tag.css("transform", "translateX(" + (0 - targetx) + "px)"); + } + if ((!this.opened && targetx > this.width / 3) || (this.opened && targetx > this.width * 0.9)) { + return this.fixbutton_targetx = this.fixbutton_initx - this.width; + } else { + return this.fixbutton_targetx = this.fixbutton_initx; + } + }; + + Sidebar.prototype.stopDrag = function() { + var targetx; + this.fixbutton.parents().off("mousemove"); + this.fixbutton.off("mousemove"); + this.fixbutton.css("pointer-events", ""); + $(".drag-bg").remove(); + if (!this.fixbutton.hasClass("dragging")) { + return; + } + this.fixbutton.removeClass("dragging"); + if (this.fixbutton_targetx !== this.fixbutton.offset().left) { + this.fixbutton.stop().animate({ + "left": this.fixbutton_targetx + }, 500, "easeOutBack", (function(_this) { + return function() { + if (_this.fixbutton_targetx === _this.fixbutton_initx) { + _this.fixbutton.css("left", "auto"); + } else { + _this.fixbutton.css("left", _this.fixbutton_targetx); + } + return $(".fixbutton-bg").trigger("mouseout"); + }; + })(this)); + if (this.fixbutton_targetx === this.fixbutton_initx) { + targetx = 0; + this.opened = false; + } else { + targetx = this.width; + if (!this.opened) { + this.onOpened(); + } + this.opened = true; + } + this.tag.css("transition", "0.4s ease-out"); + 
this.tag.css("transform", "translateX(-" + targetx + "px)").one(transitionEnd, (function(_this) { + return function() { + _this.tag.css("transition", ""); + if (!_this.opened) { + _this.container.remove(); + _this.container = null; + _this.tag.remove(); + return _this.tag = null; + } + }; + })(this)); + this.log("stopdrag", "opened:", this.opened); + if (!this.opened) { + return this.onClosed(); + } + } + }; + + Sidebar.prototype.onOpened = function() { + this.log("Opened"); + this.scrollable(); + this.tag.find("#checkbox-owned").off("click").on("click", (function(_this) { + return function() { + return setTimeout((function() { + return _this.scrollable(); + }), 300); + }; + })(this)); + this.tag.find("#button-sitelimit").on("click", (function(_this) { + return function() { + wrapper.ws.cmd("siteSetLimit", $("#input-sitelimit").val(), function() { + wrapper.notifications.add("done-sitelimit", "done", "Site storage limit modified!", 5000); + return _this.updateHtmlTag(); + }); + return false; + }; + })(this)); + this.tag.find("#checkbox-owned").on("click", (function(_this) { + return function() { + return wrapper.ws.cmd("siteSetOwned", [_this.tag.find("#checkbox-owned").is(":checked")]); + }; + })(this)); + this.tag.find("#checkbox-autodownloadoptional").on("click", (function(_this) { + return function() { + return wrapper.ws.cmd("siteSetAutodownloadoptional", [_this.tag.find("#checkbox-autodownloadoptional").is(":checked")]); + }; + })(this)); + this.tag.find("#button-identity").on("click", (function(_this) { + return function() { + wrapper.ws.cmd("certSelect"); + return false; + }; + })(this)); + this.tag.find("#checkbox-owned").on("click", (function(_this) { + return function() { + return wrapper.ws.cmd("siteSetOwned", [_this.tag.find("#checkbox-owned").is(":checked")]); + }; + })(this)); + this.tag.find("#button-settings").on("click", (function(_this) { + return function() { + wrapper.ws.cmd("fileGet", "content.json", function(res) { + var data, json_raw; + data 
= JSON.parse(res); + data["title"] = $("#settings-title").val(); + data["description"] = $("#settings-description").val(); + json_raw = unescape(encodeURIComponent(JSON.stringify(data, void 0, '\t'))); + return wrapper.ws.cmd("fileWrite", ["content.json", btoa(json_raw)], function(res) { + if (res !== "ok") { + return wrapper.notifications.add("file-write", "error", "File write error: " + res); + } else { + wrapper.notifications.add("file-write", "done", "Site settings saved!", 5000); + return _this.updateHtmlTag(); + } + }); + }); + return false; + }; + })(this)); + this.tag.find("#button-sign").on("click", (function(_this) { + return function() { + var inner_path; + inner_path = _this.tag.find("#select-contents").val(); + if (wrapper.site_info.privatekey) { + wrapper.ws.cmd("siteSign", ["stored", inner_path], function(res) { + return wrapper.notifications.add("sign", "done", inner_path + " Signed!", 5000); + }); + } else { + wrapper.displayPrompt("Enter your private key:", "password", "Sign", function(privatekey) { + return wrapper.ws.cmd("siteSign", [privatekey, inner_path], function(res) { + if (res === "ok") { + return wrapper.notifications.add("sign", "done", inner_path + " Signed!", 5000); + } + }); + }); + } + return false; + }; + })(this)); + this.tag.find("#button-publish").on("click", (function(_this) { + return function() { + var inner_path; + inner_path = _this.tag.find("#select-contents").val(); + _this.tag.find("#button-publish").addClass("loading"); + return wrapper.ws.cmd("sitePublish", { + "inner_path": inner_path, + "sign": false + }, function() { + return _this.tag.find("#button-publish").removeClass("loading"); + }); + }; + })(this)); + return this.loadGlobe(); + }; + + Sidebar.prototype.onClosed = function() { + $(window).off("resize"); + $(document.body).css("transition", "0.6s ease-in-out").removeClass("body-sidebar").on(transitionEnd, (function(_this) { + return function(e) { + if (e.target === document.body) { + 
$(document.body).css("height", "auto").css("perspective", "").css("transition", "").off(transitionEnd); + return _this.unloadGlobe(); + } + }; + })(this)); + return wrapper.setSiteInfo = this.original_set_site_info; + }; + + Sidebar.prototype.loadGlobe = function() { + if (this.tag.find(".globe").hasClass("loading")) { + return setTimeout(((function(_this) { + return function() { + if (typeof DAT === "undefined") { + return $.getScript("/uimedia/globe/all.js", _this.displayGlobe); + } else { + return _this.displayGlobe(); + } + }; + })(this)), 600); + } + }; + + Sidebar.prototype.displayGlobe = function() { + return wrapper.ws.cmd("sidebarGetPeers", [], (function(_this) { + return function(globe_data) { + if (_this.globe) { + _this.globe.scene.remove(_this.globe.points); + _this.globe.addData(globe_data, { + format: 'magnitude', + name: "hello", + animated: false + }); + _this.globe.createPoints(); + } else { + _this.globe = new DAT.Globe(_this.tag.find(".globe")[0], { + "imgDir": "/uimedia/globe/" + }); + _this.globe.addData(globe_data, { + format: 'magnitude', + name: "hello" + }); + _this.globe.createPoints(); + _this.globe.animate(); + } + return _this.tag.find(".globe").removeClass("loading"); + }; + })(this)); + }; + + Sidebar.prototype.unloadGlobe = function() { + if (!this.globe) { + return false; + } + this.globe.unload(); + return this.globe = null; + }; + + return Sidebar; + + })(Class); + + window.sidebar = new Sidebar(); + + window.transitionEnd = 'transitionend webkitTransitionEnd oTransitionEnd otransitionend'; + +}).call(this); + + +/* ---- plugins/Sidebar/media/morphdom.js ---- */ + + +(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.morphdom = f()}})(function(){var define,module,exports;return 
(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o element + * since it sets the initial value. Changing the "value" + * attribute without changing the "value" property will have + * no effect since it is only used to the set the initial value. + * Similar for the "checked" attribute. + */ + /*INPUT: function(fromEl, toEl) { + fromEl.checked = toEl.checked; + fromEl.value = toEl.value; + + if (!toEl.hasAttribute('checked')) { + fromEl.removeAttribute('checked'); + } + + if (!toEl.hasAttribute('value')) { + fromEl.removeAttribute('value'); + } + }*/ +}; + +function noop() {} + +/** + * Loop over all of the attributes on the target node and make sure the + * original DOM node has the same attributes. If an attribute + * found on the original node is not on the new node then remove it from + * the original node + * @param {HTMLElement} fromNode + * @param {HTMLElement} toNode + */ +function morphAttrs(fromNode, toNode) { + var attrs = toNode.attributes; + var i; + var attr; + var attrName; + var attrValue; + var foundAttrs = {}; + + for (i=attrs.length-1; i>=0; i--) { + attr = attrs[i]; + if (attr.specified !== false) { + attrName = attr.name; + attrValue = attr.value; + foundAttrs[attrName] = true; + + if (fromNode.getAttribute(attrName) !== attrValue) { + fromNode.setAttribute(attrName, attrValue); + } + } + } + + // Delete any extra attributes found on the original DOM element that weren't + // found on the target element. 
+ attrs = fromNode.attributes; + + for (i=attrs.length-1; i>=0; i--) { + attr = attrs[i]; + if (attr.specified !== false) { + attrName = attr.name; + if (!foundAttrs.hasOwnProperty(attrName)) { + fromNode.removeAttribute(attrName); + } + } + } +} + +/** + * Copies the children of one DOM element to another DOM element + */ +function moveChildren(from, to) { + var curChild = from.firstChild; + while(curChild) { + var nextChild = curChild.nextSibling; + to.appendChild(curChild); + curChild = nextChild; + } + return to; +} + +function morphdom(fromNode, toNode, options) { + if (!options) { + options = {}; + } + + if (typeof toNode === 'string') { + var newBodyEl = document.createElement('body'); + newBodyEl.innerHTML = toNode; + toNode = newBodyEl.childNodes[0]; + } + + var savedEls = {}; // Used to save off DOM elements with IDs + var unmatchedEls = {}; + var onNodeDiscarded = options.onNodeDiscarded || noop; + var onBeforeMorphEl = options.onBeforeMorphEl || noop; + var onBeforeMorphElChildren = options.onBeforeMorphElChildren || noop; + + function removeNodeHelper(node, nestedInSavedEl) { + var id = node.id; + // If the node has an ID then save it off since we will want + // to reuse it in case the target DOM tree has a DOM element + // with the same ID + if (id) { + savedEls[id] = node; + } else if (!nestedInSavedEl) { + // If we are not nested in a saved element then we know that this node has been + // completely discarded and will not exist in the final DOM. + onNodeDiscarded(node); + } + + if (node.nodeType === 1) { + var curChild = node.firstChild; + while(curChild) { + removeNodeHelper(curChild, nestedInSavedEl || id); + curChild = curChild.nextSibling; + } + } + } + + function walkDiscardedChildNodes(node) { + if (node.nodeType === 1) { + var curChild = node.firstChild; + while(curChild) { + + + if (!curChild.id) { + // We only want to handle nodes that don't have an ID to avoid double + // walking the same saved element. 
+ + onNodeDiscarded(curChild); + + // Walk recursively + walkDiscardedChildNodes(curChild); + } + + curChild = curChild.nextSibling; + } + } + } + + function removeNode(node, parentNode, alreadyVisited) { + parentNode.removeChild(node); + + if (alreadyVisited) { + if (!node.id) { + onNodeDiscarded(node); + walkDiscardedChildNodes(node); + } + } else { + removeNodeHelper(node); + } + } + + function morphEl(fromNode, toNode, alreadyVisited) { + if (toNode.id) { + // If an element with an ID is being morphed then it is will be in the final + // DOM so clear it out of the saved elements collection + delete savedEls[toNode.id]; + } + + if (onBeforeMorphEl(fromNode, toNode) === false) { + return; + } + + morphAttrs(fromNode, toNode); + + if (onBeforeMorphElChildren(fromNode, toNode) === false) { + return; + } + + var curToNodeChild = toNode.firstChild; + var curFromNodeChild = fromNode.firstChild; + var curToNodeId; + + var fromNextSibling; + var toNextSibling; + var savedEl; + var unmatchedEl; + + outer: while(curToNodeChild) { + toNextSibling = curToNodeChild.nextSibling; + curToNodeId = curToNodeChild.id; + + while(curFromNodeChild) { + var curFromNodeId = curFromNodeChild.id; + fromNextSibling = curFromNodeChild.nextSibling; + + if (!alreadyVisited) { + if (curFromNodeId && (unmatchedEl = unmatchedEls[curFromNodeId])) { + unmatchedEl.parentNode.replaceChild(curFromNodeChild, unmatchedEl); + morphEl(curFromNodeChild, unmatchedEl, alreadyVisited); + curFromNodeChild = fromNextSibling; + continue; + } + } + + var curFromNodeType = curFromNodeChild.nodeType; + + if (curFromNodeType === curToNodeChild.nodeType) { + var isCompatible = false; + + if (curFromNodeType === 1) { // Both nodes being compared are Element nodes + if (curFromNodeChild.tagName === curToNodeChild.tagName) { + // We have compatible DOM elements + if (curFromNodeId || curToNodeId) { + // If either DOM element has an ID then we handle + // those differently since we want to match up + // by ID + if 
(curToNodeId === curFromNodeId) { + isCompatible = true; + } + } else { + isCompatible = true; + } + } + + if (isCompatible) { + // We found compatible DOM elements so add a + // task to morph the compatible DOM elements + morphEl(curFromNodeChild, curToNodeChild, alreadyVisited); + } + } else if (curFromNodeType === 3) { // Both nodes being compared are Text nodes + isCompatible = true; + curFromNodeChild.nodeValue = curToNodeChild.nodeValue; + } + + if (isCompatible) { + curToNodeChild = toNextSibling; + curFromNodeChild = fromNextSibling; + continue outer; + } + } + + // No compatible match so remove the old node from the DOM + removeNode(curFromNodeChild, fromNode, alreadyVisited); + + curFromNodeChild = fromNextSibling; + } + + if (curToNodeId) { + if ((savedEl = savedEls[curToNodeId])) { + morphEl(savedEl, curToNodeChild, true); + curToNodeChild = savedEl; // We want to append the saved element instead + } else { + // The current DOM element in the target tree has an ID + // but we did not find a match in any of the corresponding + // siblings. We just put the target element in the old DOM tree + // but if we later find an element in the old DOM tree that has + // a matching ID then we will replace the target element + // with the corresponding old element and morph the old element + unmatchedEls[curToNodeId] = curToNodeChild; + } + } + + // If we got this far then we did not find a candidate match for our "to node" + // and we exhausted all of the children "from" nodes. Therefore, we will just + // append the current "to node" to the end + fromNode.appendChild(curToNodeChild); + + curToNodeChild = toNextSibling; + curFromNodeChild = fromNextSibling; + } + + // We have processed all of the "to nodes". 
If curFromNodeChild is non-null then + // we still have some from nodes left over that need to be removed + while(curFromNodeChild) { + fromNextSibling = curFromNodeChild.nextSibling; + removeNode(curFromNodeChild, fromNode, alreadyVisited); + curFromNodeChild = fromNextSibling; + } + + var specialElHandler = specialElHandlers[fromNode.tagName]; + if (specialElHandler) { + specialElHandler(fromNode, toNode); + } + } + + var morphedNode = fromNode; + var morphedNodeType = morphedNode.nodeType; + var toNodeType = toNode.nodeType; + + // Handle the case where we are given two DOM nodes that are not + // compatible (e.g.
    --> or
    --> TEXT) + if (morphedNodeType === 1) { + if (toNodeType === 1) { + if (morphedNode.tagName !== toNode.tagName) { + onNodeDiscarded(fromNode); + morphedNode = moveChildren(morphedNode, document.createElement(toNode.tagName)); + } + } else { + // Going from an element node to a text node + return toNode; + } + } else if (morphedNodeType === 3) { // Text node + if (toNodeType === 3) { + morphedNode.nodeValue = toNode.nodeValue; + return morphedNode; + } else { + onNodeDiscarded(fromNode); + // Text node to something else + return toNode; + } + } + + morphEl(morphedNode, toNode, false); + + // Fire the "onNodeDiscarded" event for any saved elements + // that never found a new home in the morphed DOM + for (var savedElId in savedEls) { + if (savedEls.hasOwnProperty(savedElId)) { + var savedEl = savedEls[savedElId]; + onNodeDiscarded(savedEl); + walkDiscardedChildNodes(savedEl); + } + } + + if (morphedNode !== fromNode && fromNode.parentNode) { + fromNode.parentNode.replaceChild(morphedNode, fromNode); + } + + return morphedNode; +} + +module.exports = morphdom; +},{}]},{},[1])(1) +}); \ No newline at end of file diff --git a/plugins/Sidebar/media/morphdom.js b/plugins/Sidebar/media/morphdom.js new file mode 100644 index 00000000..6829eef3 --- /dev/null +++ b/plugins/Sidebar/media/morphdom.js @@ -0,0 +1,340 @@ +(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.morphdom = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var 
n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o element + * since it sets the initial value. Changing the "value" + * attribute without changing the "value" property will have + * no effect since it is only used to the set the initial value. + * Similar for the "checked" attribute. + */ + /*INPUT: function(fromEl, toEl) { + fromEl.checked = toEl.checked; + fromEl.value = toEl.value; + + if (!toEl.hasAttribute('checked')) { + fromEl.removeAttribute('checked'); + } + + if (!toEl.hasAttribute('value')) { + fromEl.removeAttribute('value'); + } + }*/ +}; + +function noop() {} + +/** + * Loop over all of the attributes on the target node and make sure the + * original DOM node has the same attributes. If an attribute + * found on the original node is not on the new node then remove it from + * the original node + * @param {HTMLElement} fromNode + * @param {HTMLElement} toNode + */ +function morphAttrs(fromNode, toNode) { + var attrs = toNode.attributes; + var i; + var attr; + var attrName; + var attrValue; + var foundAttrs = {}; + + for (i=attrs.length-1; i>=0; i--) { + attr = attrs[i]; + if (attr.specified !== false) { + attrName = attr.name; + attrValue = attr.value; + foundAttrs[attrName] = true; + + if (fromNode.getAttribute(attrName) !== attrValue) { + fromNode.setAttribute(attrName, attrValue); + } + } + } + + // Delete any extra attributes found on the original DOM element that weren't + // found on the target element. 
+ attrs = fromNode.attributes; + + for (i=attrs.length-1; i>=0; i--) { + attr = attrs[i]; + if (attr.specified !== false) { + attrName = attr.name; + if (!foundAttrs.hasOwnProperty(attrName)) { + fromNode.removeAttribute(attrName); + } + } + } +} + +/** + * Copies the children of one DOM element to another DOM element + */ +function moveChildren(from, to) { + var curChild = from.firstChild; + while(curChild) { + var nextChild = curChild.nextSibling; + to.appendChild(curChild); + curChild = nextChild; + } + return to; +} + +function morphdom(fromNode, toNode, options) { + if (!options) { + options = {}; + } + + if (typeof toNode === 'string') { + var newBodyEl = document.createElement('body'); + newBodyEl.innerHTML = toNode; + toNode = newBodyEl.childNodes[0]; + } + + var savedEls = {}; // Used to save off DOM elements with IDs + var unmatchedEls = {}; + var onNodeDiscarded = options.onNodeDiscarded || noop; + var onBeforeMorphEl = options.onBeforeMorphEl || noop; + var onBeforeMorphElChildren = options.onBeforeMorphElChildren || noop; + + function removeNodeHelper(node, nestedInSavedEl) { + var id = node.id; + // If the node has an ID then save it off since we will want + // to reuse it in case the target DOM tree has a DOM element + // with the same ID + if (id) { + savedEls[id] = node; + } else if (!nestedInSavedEl) { + // If we are not nested in a saved element then we know that this node has been + // completely discarded and will not exist in the final DOM. + onNodeDiscarded(node); + } + + if (node.nodeType === 1) { + var curChild = node.firstChild; + while(curChild) { + removeNodeHelper(curChild, nestedInSavedEl || id); + curChild = curChild.nextSibling; + } + } + } + + function walkDiscardedChildNodes(node) { + if (node.nodeType === 1) { + var curChild = node.firstChild; + while(curChild) { + + + if (!curChild.id) { + // We only want to handle nodes that don't have an ID to avoid double + // walking the same saved element. 
+ + onNodeDiscarded(curChild); + + // Walk recursively + walkDiscardedChildNodes(curChild); + } + + curChild = curChild.nextSibling; + } + } + } + + function removeNode(node, parentNode, alreadyVisited) { + parentNode.removeChild(node); + + if (alreadyVisited) { + if (!node.id) { + onNodeDiscarded(node); + walkDiscardedChildNodes(node); + } + } else { + removeNodeHelper(node); + } + } + + function morphEl(fromNode, toNode, alreadyVisited) { + if (toNode.id) { + // If an element with an ID is being morphed then it is will be in the final + // DOM so clear it out of the saved elements collection + delete savedEls[toNode.id]; + } + + if (onBeforeMorphEl(fromNode, toNode) === false) { + return; + } + + morphAttrs(fromNode, toNode); + + if (onBeforeMorphElChildren(fromNode, toNode) === false) { + return; + } + + var curToNodeChild = toNode.firstChild; + var curFromNodeChild = fromNode.firstChild; + var curToNodeId; + + var fromNextSibling; + var toNextSibling; + var savedEl; + var unmatchedEl; + + outer: while(curToNodeChild) { + toNextSibling = curToNodeChild.nextSibling; + curToNodeId = curToNodeChild.id; + + while(curFromNodeChild) { + var curFromNodeId = curFromNodeChild.id; + fromNextSibling = curFromNodeChild.nextSibling; + + if (!alreadyVisited) { + if (curFromNodeId && (unmatchedEl = unmatchedEls[curFromNodeId])) { + unmatchedEl.parentNode.replaceChild(curFromNodeChild, unmatchedEl); + morphEl(curFromNodeChild, unmatchedEl, alreadyVisited); + curFromNodeChild = fromNextSibling; + continue; + } + } + + var curFromNodeType = curFromNodeChild.nodeType; + + if (curFromNodeType === curToNodeChild.nodeType) { + var isCompatible = false; + + if (curFromNodeType === 1) { // Both nodes being compared are Element nodes + if (curFromNodeChild.tagName === curToNodeChild.tagName) { + // We have compatible DOM elements + if (curFromNodeId || curToNodeId) { + // If either DOM element has an ID then we handle + // those differently since we want to match up + // by ID + if 
(curToNodeId === curFromNodeId) { + isCompatible = true; + } + } else { + isCompatible = true; + } + } + + if (isCompatible) { + // We found compatible DOM elements so add a + // task to morph the compatible DOM elements + morphEl(curFromNodeChild, curToNodeChild, alreadyVisited); + } + } else if (curFromNodeType === 3) { // Both nodes being compared are Text nodes + isCompatible = true; + curFromNodeChild.nodeValue = curToNodeChild.nodeValue; + } + + if (isCompatible) { + curToNodeChild = toNextSibling; + curFromNodeChild = fromNextSibling; + continue outer; + } + } + + // No compatible match so remove the old node from the DOM + removeNode(curFromNodeChild, fromNode, alreadyVisited); + + curFromNodeChild = fromNextSibling; + } + + if (curToNodeId) { + if ((savedEl = savedEls[curToNodeId])) { + morphEl(savedEl, curToNodeChild, true); + curToNodeChild = savedEl; // We want to append the saved element instead + } else { + // The current DOM element in the target tree has an ID + // but we did not find a match in any of the corresponding + // siblings. We just put the target element in the old DOM tree + // but if we later find an element in the old DOM tree that has + // a matching ID then we will replace the target element + // with the corresponding old element and morph the old element + unmatchedEls[curToNodeId] = curToNodeChild; + } + } + + // If we got this far then we did not find a candidate match for our "to node" + // and we exhausted all of the children "from" nodes. Therefore, we will just + // append the current "to node" to the end + fromNode.appendChild(curToNodeChild); + + curToNodeChild = toNextSibling; + curFromNodeChild = fromNextSibling; + } + + // We have processed all of the "to nodes". 
If curFromNodeChild is non-null then + // we still have some from nodes left over that need to be removed + while(curFromNodeChild) { + fromNextSibling = curFromNodeChild.nextSibling; + removeNode(curFromNodeChild, fromNode, alreadyVisited); + curFromNodeChild = fromNextSibling; + } + + var specialElHandler = specialElHandlers[fromNode.tagName]; + if (specialElHandler) { + specialElHandler(fromNode, toNode); + } + } + + var morphedNode = fromNode; + var morphedNodeType = morphedNode.nodeType; + var toNodeType = toNode.nodeType; + + // Handle the case where we are given two DOM nodes that are not + // compatible (e.g.
    --> or
    --> TEXT) + if (morphedNodeType === 1) { + if (toNodeType === 1) { + if (morphedNode.tagName !== toNode.tagName) { + onNodeDiscarded(fromNode); + morphedNode = moveChildren(morphedNode, document.createElement(toNode.tagName)); + } + } else { + // Going from an element node to a text node + return toNode; + } + } else if (morphedNodeType === 3) { // Text node + if (toNodeType === 3) { + morphedNode.nodeValue = toNode.nodeValue; + return morphedNode; + } else { + onNodeDiscarded(fromNode); + // Text node to something else + return toNode; + } + } + + morphEl(morphedNode, toNode, false); + + // Fire the "onNodeDiscarded" event for any saved elements + // that never found a new home in the morphed DOM + for (var savedElId in savedEls) { + if (savedEls.hasOwnProperty(savedElId)) { + var savedEl = savedEls[savedElId]; + onNodeDiscarded(savedEl); + walkDiscardedChildNodes(savedEl); + } + } + + if (morphedNode !== fromNode && fromNode.parentNode) { + fromNode.parentNode.replaceChild(morphedNode, fromNode); + } + + return morphedNode; +} + +module.exports = morphdom; +},{}]},{},[1])(1) +}); \ No newline at end of file diff --git a/plugins/Stats/StatsPlugin.py b/plugins/Stats/StatsPlugin.py new file mode 100644 index 00000000..cef76c70 --- /dev/null +++ b/plugins/Stats/StatsPlugin.py @@ -0,0 +1,610 @@ +import time +import cgi +import os + +from Plugin import PluginManager +from Config import config + + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + + def formatTableRow(self, row): + back = [] + for format, val in row: + if val is None: + formatted = "n/a" + elif format == "since": + if val: + formatted = "%.0f" % (time.time() - val) + else: + formatted = "n/a" + else: + formatted = format % val + back.append("%s" % formatted) + return "%s" % "".join(back) + + def getObjSize(self, obj, hpy=None): + if hpy: + return float(hpy.iso(obj).domisize) / 1024 + else: + return 0 + + # /Stats entry point + def actionStats(self): + import gc + import sys + 
from Ui import UiRequest + from Crypt import CryptConnection + + hpy = None + if self.get.get("size") == "1": # Calc obj size + try: + import guppy + hpy = guppy.hpy() + except: + pass + self.sendHeader() + s = time.time() + main = sys.modules["main"] + + # Style + yield """ + + """ + + # Memory + try: + yield "rev%s | " % config.rev + yield "%s | " % config.ip_external + yield "Opened: %s | " % main.file_server.port_opened + yield "Crypt: %s | " % CryptConnection.manager.crypt_supported + yield "In: %.2fMB, Out: %.2fMB | " % ( + float(main.file_server.bytes_recv) / 1024 / 1024, + float(main.file_server.bytes_sent) / 1024 / 1024 + ) + yield "Peerid: %s | " % main.file_server.peer_id + import psutil + process = psutil.Process(os.getpid()) + mem = process.get_memory_info()[0] / float(2 ** 20) + yield "Mem: %.2fMB | " % mem + yield "Threads: %s | " % len(process.threads()) + yield "CPU: usr %.2fs sys %.2fs | " % process.cpu_times() + yield "Files: %s | " % len(process.open_files()) + yield "Sockets: %s | " % len(process.connections()) + yield "Calc size on off" + except Exception: + pass + yield "
    " + + # Connections + yield "Connections (%s, total made: %s):
    " % ( + len(main.file_server.connections), main.file_server.last_connection_id + ) + yield "" + yield "" + yield "" + for connection in main.file_server.connections: + if "cipher" in dir(connection.sock): + cipher = connection.sock.cipher()[0] + else: + cipher = connection.crypt + yield self.formatTableRow([ + ("%3d", connection.id), + ("%s", connection.protocol), + ("%s", connection.type), + ("%s:%s", (connection.ip, connection.port)), + ("%s", connection.handshake.get("port_opened")), + ("%s", (connection.crypt, cipher)), + ("%6.3f", connection.last_ping_delay), + ("%s", connection.incomplete_buff_recv), + ("since", max(connection.last_send_time, connection.last_recv_time)), + ("since", connection.start_time), + ("%.3f", connection.last_sent_time - connection.last_send_time), + ("%.0fkB", connection.bytes_sent / 1024), + ("%.0fkB", connection.bytes_recv / 1024), + ("%s", connection.last_cmd), + ("%s", connection.waiting_requests.keys()), + ("%s r%s", (connection.handshake.get("version"), connection.handshake.get("rev", "?"))), + ("%s", connection.handshake.get("peer_id")), + ]) + yield "
    id proto type ip open crypt pingbuff idle open delay out in last sentwaiting version peerid
    " + + # Sites + yield "

    Sites:" + yield "" + yield "" + for site in self.server.sites.values(): + yield self.formatTableRow([ + ( + """%s""", + (site.address, site.address) + ), + ("%s", [peer.connection.id for peer in site.peers.values() if peer.connection and peer.connection.connected]), + ("%s/%s/%s", ( + len([peer for peer in site.peers.values() if peer.connection and peer.connection.connected]), + len(site.getConnectablePeers(100)), + len(site.peers) + )), + ("%s", len(site.content_manager.contents)), + ("%.0fkB", site.settings.get("bytes_sent", 0) / 1024), + ("%.0fkB", site.settings.get("bytes_recv", 0) / 1024), + ]) + yield "" + yield "
    address connected peers content.json out in
    " + + # No more if not in debug mode + if not config.debug: + raise StopIteration + + # Object types + + + obj_count = {} + for obj in gc.get_objects(): + obj_type = str(type(obj)) + if obj_type not in obj_count: + obj_count[obj_type] = [0, 0] + obj_count[obj_type][0] += 1 # Count + obj_count[obj_type][1] += float(sys.getsizeof(obj)) / 1024 # Size + + yield "

    Objects in memory (types: %s, total: %s, %.2fkb):
    " % ( + len(obj_count), + sum([stat[0] for stat in obj_count.values()]), + sum([stat[1] for stat in obj_count.values()]) + ) + + for obj, stat in sorted(obj_count.items(), key=lambda x: x[1][0], reverse=True): # Sorted by count + yield " - %.1fkb = %s x %s
    " % (stat[1], stat[0], obj, cgi.escape(obj)) + + # Classes + + class_count = {} + for obj in gc.get_objects(): + obj_type = str(type(obj)) + if obj_type != "": + continue + class_name = obj.__class__.__name__ + if class_name not in class_count: + class_count[class_name] = [0, 0] + class_count[class_name][0] += 1 # Count + class_count[class_name][1] += float(sys.getsizeof(obj)) / 1024 # Size + + yield "

    Classes in memory (types: %s, total: %s, %.2fkb):
    " % ( + len(class_count), + sum([stat[0] for stat in class_count.values()]), + sum([stat[1] for stat in class_count.values()]) + ) + + for obj, stat in sorted(class_count.items(), key=lambda x: x[1][0], reverse=True): # Sorted by count + yield " - %.1fkb = %s x %s
    " % (stat[1], stat[0], obj, cgi.escape(obj)) + + from greenlet import greenlet + objs = [obj for obj in gc.get_objects() if isinstance(obj, greenlet)] + yield "
    Greenlets (%s):
    " % len(objs) + for obj in objs: + yield " - %.1fkb: %s
    " % (self.getObjSize(obj, hpy), cgi.escape(repr(obj))) + + from Worker import Worker + objs = [obj for obj in gc.get_objects() if isinstance(obj, Worker)] + yield "
    Workers (%s):
    " % len(objs) + for obj in objs: + yield " - %.1fkb: %s
    " % (self.getObjSize(obj, hpy), cgi.escape(repr(obj))) + + from Connection import Connection + objs = [obj for obj in gc.get_objects() if isinstance(obj, Connection)] + yield "
    Connections (%s):
    " % len(objs) + for obj in objs: + yield " - %.1fkb: %s
    " % (self.getObjSize(obj, hpy), cgi.escape(repr(obj))) + + from socket import socket + objs = [obj for obj in gc.get_objects() if isinstance(obj, socket)] + yield "
    Sockets (%s):
    " % len(objs) + for obj in objs: + yield " - %.1fkb: %s
    " % (self.getObjSize(obj, hpy), cgi.escape(repr(obj))) + + from msgpack import Unpacker + objs = [obj for obj in gc.get_objects() if isinstance(obj, Unpacker)] + yield "
    Msgpack unpacker (%s):
    " % len(objs) + for obj in objs: + yield " - %.1fkb: %s
    " % (self.getObjSize(obj, hpy), cgi.escape(repr(obj))) + + from Site import Site + objs = [obj for obj in gc.get_objects() if isinstance(obj, Site)] + yield "
    Sites (%s):
    " % len(objs) + for obj in objs: + yield " - %.1fkb: %s
    " % (self.getObjSize(obj, hpy), cgi.escape(repr(obj))) + + objs = [obj for obj in gc.get_objects() if isinstance(obj, self.server.log.__class__)] + yield "
    Loggers (%s):
    " % len(objs) + for obj in objs: + yield " - %.1fkb: %s
    " % (self.getObjSize(obj, hpy), cgi.escape(repr(obj.name))) + + objs = [obj for obj in gc.get_objects() if isinstance(obj, UiRequest)] + yield "
    UiRequests (%s):
    " % len(objs) + for obj in objs: + yield " - %.1fkb: %s
    " % (self.getObjSize(obj, hpy), cgi.escape(repr(obj))) + + from Peer import Peer + objs = [obj for obj in gc.get_objects() if isinstance(obj, Peer)] + yield "
    Peers (%s):
    " % len(objs) + for obj in objs: + yield " - %.1fkb: %s
    " % (self.getObjSize(obj, hpy), cgi.escape(repr(obj))) + + objs = [(key, val) for key, val in sys.modules.iteritems() if val is not None] + objs.sort() + yield "
    Modules (%s):
    " % len(objs) + for module_name, module in objs: + yield " - %.3fkb: %s %s
    " % (self.getObjSize(module, hpy), module_name, cgi.escape(repr(module))) + + gc.collect() # Implicit grabage collection + yield "Done in %.1f" % (time.time() - s) + + def actionDumpobj(self): + + import gc + import sys + + self.sendHeader() + + # No more if not in debug mode + if not config.debug: + yield "Not in debug mode" + raise StopIteration + + class_filter = self.get.get("class") + + yield """ + + """ + + objs = gc.get_objects() + for obj in objs: + obj_type = str(type(obj)) + if obj_type != "" or obj.__class__.__name__ != class_filter: + continue + yield "%.1fkb %s... " % (float(sys.getsizeof(obj)) / 1024, cgi.escape(str(obj))) + for attr in dir(obj): + yield "- %s: %s
    " % (attr, cgi.escape(str(getattr(obj, attr)))) + yield "
    " + + gc.collect() # Implicit grabage collection + + def actionListobj(self): + + import gc + import sys + + self.sendHeader() + + # No more if not in debug mode + if not config.debug: + yield "Not in debug mode" + raise StopIteration + + type_filter = self.get.get("type") + + yield """ + + """ + + yield "Listing all %s objects in memory...
    " % cgi.escape(type_filter) + + ref_count = {} + objs = gc.get_objects() + for obj in objs: + obj_type = str(type(obj)) + if obj_type != type_filter: + continue + refs = [ + ref for ref in gc.get_referrers(obj) + if hasattr(ref, "__class__") and + ref.__class__.__name__ not in ["list", "dict", "function", "type", "frame", "WeakSet", "tuple"] + ] + if not refs: + continue + yield "%.1fkb %s... " % ( + float(sys.getsizeof(obj)) / 1024, cgi.escape(str(obj)), cgi.escape(str(obj)[0:100].ljust(100)) + ) + for ref in refs: + yield " [" + if "object at" in str(ref) or len(str(ref)) > 100: + yield str(ref.__class__.__name__) + else: + yield str(ref.__class__.__name__) + ":" + cgi.escape(str(ref)) + yield "] " + ref_type = ref.__class__.__name__ + if ref_type not in ref_count: + ref_count[ref_type] = [0, 0] + ref_count[ref_type][0] += 1 # Count + ref_count[ref_type][1] += float(sys.getsizeof(obj)) / 1024 # Size + yield "
    " + + yield "
    Object referrer (total: %s, %.2fkb):
    " % (len(ref_count), sum([stat[1] for stat in ref_count.values()])) + + for obj, stat in sorted(ref_count.items(), key=lambda x: x[1][0], reverse=True)[0:30]: # Sorted by count + yield " - %.1fkb = %s x %s
    " % (stat[1], stat[0], cgi.escape(str(obj))) + + gc.collect() # Implicit grabage collection + + def actionBenchmark(self): + import sys + import gc + from contextlib import contextmanager + + output = self.sendHeader() + + @contextmanager + def benchmark(name, standard): + s = time.time() + output("- %s" % name) + try: + yield 1 + except Exception, err: + output("
    ! Error: %s
    " % err) + taken = time.time() - s + multipler = standard / taken + if multipler < 0.3: + speed = "Sloooow" + elif multipler < 0.5: + speed = "Ehh" + elif multipler < 0.8: + speed = "Goodish" + elif multipler < 1.2: + speed = "OK" + elif multipler < 1.7: + speed = "Fine" + elif multipler < 2.5: + speed = "Fast" + elif multipler < 3.5: + speed = "WOW" + else: + speed = "Insane!!" + output("%.3fs [x%.2f: %s]
    " % (taken, multipler, speed)) + time.sleep(0.01) + + yield """ + + """ + + yield "Benchmarking ZeroNet %s (rev%s) Python %s on: %s...
    " % (config.version, config.rev, sys.version, sys.platform) + + t = time.time() + + # CryptBitcoin + yield "
    CryptBitcoin:
    " + from Crypt import CryptBitcoin + + # seed = CryptBitcoin.newSeed() + # yield "- Seed: %s
    " % seed + seed = "e180efa477c63b0f2757eac7b1cce781877177fe0966be62754ffd4c8592ce38" + + with benchmark("hdPrivatekey x 10", 0.7): + for i in range(10): + privatekey = CryptBitcoin.hdPrivatekey(seed, i * 10) + yield "." + valid = "5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk" + assert privatekey == valid, "%s != %s" % (privatekey, valid) + + data = "Hello" * 1024 # 5k + with benchmark("sign x 10", 0.35): + for i in range(10): + yield "." + sign = CryptBitcoin.sign(data, privatekey) + valid = "HFGXaDauZ8vX/N9Jn+MRiGm9h+I94zUhDnNYFaqMGuOi+4+BbWHjuwmx0EaKNV1G+kP0tQDxWu0YApxwxZbSmZU=" + assert sign == valid, "%s != %s" % (sign, valid) + + address = CryptBitcoin.privatekeyToAddress(privatekey) + if CryptBitcoin.opensslVerify: # Openssl avalible + with benchmark("openssl verify x 100", 0.37): + for i in range(100): + if i % 10 == 0: + yield "." + ok = CryptBitcoin.verify(data, address, sign) + assert ok, "does not verify from %s" % address + else: + yield " - openssl verify x 100...not avalible :(
    " + + openssl_verify_bk = CryptBitcoin.opensslVerify # Emulate openssl not found in any way + CryptBitcoin.opensslVerify = None + with benchmark("pure-python verify x 10", 1.6): + for i in range(10): + yield "." + ok = CryptBitcoin.verify(data, address, sign) + assert ok, "does not verify from %s" % address + CryptBitcoin.opensslVerify = openssl_verify_bk + + # CryptHash + yield "
    CryptHash:
    " + from Crypt import CryptHash + from cStringIO import StringIO + + data = StringIO("Hello" * 1024 * 1024) # 5m + with benchmark("sha512 x 100 000", 1): + for i in range(10): + for y in range(10000): + hash = CryptHash.sha512sum(data) + yield "." + valid = "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce" + assert hash == valid, "%s != %s" % (hash, valid) + + with benchmark("os.urandom(256) x 100 000", 0.65): + for i in range(10): + for y in range(10000): + data = os.urandom(256) + yield "." + + # Msgpack + yield "
    Msgpack:
    " + import msgpack + binary = 'fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv' + data = {"int": 1024*1024*1024, "float": 12345.67890, "text": "hello"*1024, "binary": binary} + with benchmark("pack 5K x 10 000", 0.78): + for i in range(10): + for y in range(1000): + data_packed = msgpack.packb(data) + yield "." 
+ valid = """\x84\xa3int\xce@\x00\x00\x00\xa4text\xda\x14\x00hellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohell
ohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohell
ohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohello\xa5float\xcb@\xc8\x1c\xd6\xe61\xf8\xa1\xa6binary\xda\x01\x00fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\x
c6\x17e>\xa7 \xbbv""" + assert data_packed == valid, "%s
    !=
    %s" % (repr(data_packed), repr(valid)) + + with benchmark("unpack 5K x 10 000", 1.2): + for i in range(10): + for y in range(1000): + data_unpacked = msgpack.unpackb(data_packed) + yield "." + assert data == data_unpacked, "%s != %s" % (data_unpack, data) + + with benchmark("streaming unpack 5K x 10 000", 1.4): + for i in range(10): + unpacker = msgpack.Unpacker() + for y in range(1000): + unpacker.feed(data_packed) + for data_unpacked in unpacker: + pass + yield "." + assert data == data_unpacked, "%s != %s" % (data_unpack, data) + + # Db + yield "
    Db:
    " + from Db import Db + + schema = { + "db_name": "TestDb", + "db_file": "%s/benchmark.db" % config.data_dir, + "maps": { + ".*": { + "to_table": { + "test": "test" + } + } + }, + "tables": { + "test": { + "cols": [ + ["test_id", "INTEGER"], + ["title", "TEXT"], + ["json_id", "INTEGER REFERENCES json (json_id)"] + ], + "indexes": ["CREATE UNIQUE INDEX test_key ON test(test_id, json_id)"], + "schema_changed": 1426195822 + } + } + } + + if os.path.isfile("%s/benchmark.db" % config.data_dir): + os.unlink("%s/benchmark.db" % config.data_dir) + + with benchmark("Open x 10", 0.13): + for i in range(10): + db = Db(schema, "%s/benchmark.db" % config.data_dir) + db.checkTables() + db.close() + yield "." + + db = Db(schema, "%s/benchmark.db" % config.data_dir) + db.checkTables() + import json + + with benchmark("Insert x 10 x 1000", 1.0): + for u in range(10): # 10 user + data = {"test": []} + for i in range(1000): # 1000 line of data + data["test"].append({"test_id": i, "title": "Testdata for %s message %s" % (u, i)}) + json.dump(data, open("%s/test_%s.json" % (config.data_dir, u), "w")) + db.loadJson("%s/test_%s.json" % (config.data_dir, u)) + os.unlink("%s/test_%s.json" % (config.data_dir, u)) + yield "." + + with benchmark("Buffered insert x 100 x 100", 1.3): + cur = db.getCursor() + cur.execute("BEGIN") + cur.logging = False + for u in range(100, 200): # 100 user + data = {"test": []} + for i in range(100): # 1000 line of data + data["test"].append({"test_id": i, "title": "Testdata for %s message %s" % (u, i)}) + json.dump(data, open("%s/test_%s.json" % (config.data_dir, u), "w")) + db.loadJson("%s/test_%s.json" % (config.data_dir, u), cur=cur) + os.unlink("%s/test_%s.json" % (config.data_dir, u)) + if u % 10 == 0: + yield "." + cur.execute("COMMIT") + + yield " - Total rows in db: %s
    " % db.execute("SELECT COUNT(*) AS num FROM test").fetchone()[0] + + with benchmark("Indexed query x 1000", 0.25): + found = 0 + cur = db.getCursor() + cur.logging = False + for i in range(1000): # 1000x by test_id + res = cur.execute("SELECT * FROM test WHERE test_id = %s" % i) + for row in res: + found += 1 + if i % 100 == 0: + yield "." + + assert found == 20000, "Found: %s != 20000" % found + + with benchmark("Not indexed query x 100", 0.6): + found = 0 + cur = db.getCursor() + cur.logging = False + for i in range(100): # 1000x by test_id + res = cur.execute("SELECT * FROM test WHERE json_id = %s" % i) + for row in res: + found += 1 + if i % 10 == 0: + yield "." + + assert found == 18900, "Found: %s != 18900" % found + + with benchmark("Like query x 100", 1.8): + found = 0 + cur = db.getCursor() + cur.logging = False + for i in range(100): # 1000x by test_id + res = cur.execute("SELECT * FROM test WHERE title LIKE '%%message %s%%'" % i) + for row in res: + found += 1 + if i % 10 == 0: + yield "." + + assert found == 38900, "Found: %s != 11000" % found + + db.close() + if os.path.isfile("%s/benchmark.db" % config.data_dir): + os.unlink("%s/benchmark.db" % config.data_dir) + + gc.collect() # Implicit grabage collection + + yield "
    Done. Total: %.2fs" % (time.time() - t) + + def actionGcCollect(self): + import gc + self.sendHeader() + yield str(gc.collect()) diff --git a/plugins/Stats/__init__.py b/plugins/Stats/__init__.py new file mode 100644 index 00000000..90bd9d6e --- /dev/null +++ b/plugins/Stats/__init__.py @@ -0,0 +1 @@ +import StatsPlugin \ No newline at end of file diff --git a/plugins/Trayicon/TrayiconPlugin.py b/plugins/Trayicon/TrayiconPlugin.py new file mode 100644 index 00000000..2d71c55a --- /dev/null +++ b/plugins/Trayicon/TrayiconPlugin.py @@ -0,0 +1,138 @@ +import time +import os +import sys +import atexit + +from Plugin import PluginManager +from Config import config + +allow_reload = False # No source reload supported in this plugin + + +@PluginManager.registerTo("Actions") +class ActionsPlugin(object): + + def main(self): + global notificationicon, winfolders + from lib import notificationicon, winfolders + import gevent.threadpool + + self.main = sys.modules["main"] + + fs_encoding = sys.getfilesystemencoding() + + icon = notificationicon.NotificationIcon( + os.path.join(os.path.dirname(os.path.abspath(__file__).decode(fs_encoding)), 'trayicon.ico'), + "ZeroNet %s" % config.version + ) + self.icon = icon + + if not config.debug: # Hide console if not in debug mode + notificationicon.hideConsole() + self.console = False + else: + self.console = True + + @atexit.register + def hideIcon(): + icon.die() + + ui_ip = config.ui_ip if config.ui_ip != "*" else "127.0.0.1" + + icon.items = ( + (self.titleIp, False), + (self.titleConnections, False), + (self.titleTransfer, False), + (self.titleConsole, self.toggleConsole), + (self.titleAutorun, self.toggleAutorun), + "--", + ("ZeroNet Twitter", lambda: self.opensite("https://twitter.com/HelloZeroNet")), + ("ZeroNet Reddit", lambda: self.opensite("http://www.reddit.com/r/zeronet/")), + ("ZeroNet Github", lambda: self.opensite("https://github.com/HelloZeroNet/ZeroNet")), + ("Report bug/request feature", lambda: 
self.opensite("https://github.com/HelloZeroNet/ZeroNet/issues")), + "--", + ("!Open ZeroNet", lambda: self.opensite("http://%s:%s" % (ui_ip, config.ui_port))), + "--", + ("Quit", self.quit), + + ) + + icon.clicked = lambda: self.opensite("http://%s:%s" % (ui_ip, config.ui_port)) + gevent.threadpool.start_new_thread(icon._run, ()) # Start in real thread (not gevent compatible) + super(ActionsPlugin, self).main() + icon._die = True + + def quit(self): + self.icon.die() + time.sleep(0.1) + sys.exit() + # self.main.ui_server.stop() + # self.main.file_server.stop() + + def opensite(self, url): + import webbrowser + webbrowser.open(url, new=0) + + def titleIp(self): + title = "!IP: %s" % config.ip_external + if self.main.file_server.port_opened: + title += " (active)" + else: + title += " (passive)" + return title + + def titleConnections(self): + title = "Connections: %s" % len(self.main.file_server.connections) + return title + + def titleTransfer(self): + title = "Received: %.2f MB | Sent: %.2f MB" % ( + float(self.main.file_server.bytes_recv) / 1024 / 1024, + float(self.main.file_server.bytes_sent) / 1024 / 1024 + ) + return title + + def titleConsole(self): + if self.console: + return "+Show console window" + else: + return "Show console window" + + def toggleConsole(self): + if self.console: + notificationicon.hideConsole() + self.console = False + else: + notificationicon.showConsole() + self.console = True + + def getAutorunPath(self): + return "%s\\zeronet.cmd" % winfolders.get(winfolders.STARTUP) + + def formatAutorun(self): + args = sys.argv[:] + args.insert(0, sys.executable) + if sys.platform == 'win32': + args = ['"%s"' % arg for arg in args] + cmd = " ".join(args) + + # Dont open browser on autorun + cmd = cmd.replace("start.py", "zeronet.py").replace('"--open_browser"', "").replace('"default_browser"', "").strip() + + return "@echo off\ncd /D %s\n%s" % (os.getcwd(), cmd) + + def isAutorunEnabled(self): + path = self.getAutorunPath() + return 
os.path.isfile(path) and open(path).read() == self.formatAutorun() + + def titleAutorun(self): + if self.isAutorunEnabled(): + return "+Start ZeroNet when Windows starts" + else: + return "Start ZeroNet when Windows starts" + + def toggleAutorun(self): + if self.isAutorunEnabled(): + os.unlink(self.getAutorunPath()) + else: + open(self.getAutorunPath(), "w").write(self.formatAutorun()) diff --git a/plugins/Trayicon/__init__.py b/plugins/Trayicon/__init__.py new file mode 100644 index 00000000..5b584962 --- /dev/null +++ b/plugins/Trayicon/__init__.py @@ -0,0 +1,4 @@ +import sys + +if sys.platform == 'win32': + import TrayiconPlugin \ No newline at end of file diff --git a/plugins/Trayicon/lib/__init__.py b/plugins/Trayicon/lib/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/plugins/Trayicon/lib/notificationicon.py b/plugins/Trayicon/lib/notificationicon.py new file mode 100644 index 00000000..b6fd25dc --- /dev/null +++ b/plugins/Trayicon/lib/notificationicon.py @@ -0,0 +1,806 @@ +# Pure ctypes windows taskbar notification icon +# via https://gist.github.com/jasonbot/5759510 +# Modified for ZeroNet + +import ctypes +import ctypes.wintypes +import os +#import threading +#import Queue +import uuid +import time +import gevent + +__all__ = ['NotificationIcon'] + +# Create popup menu + +CreatePopupMenu = ctypes.windll.user32.CreatePopupMenu +CreatePopupMenu.restype = ctypes.wintypes.HMENU +CreatePopupMenu.argtypes = [] + +MF_BYCOMMAND = 0x0 +MF_BYPOSITION = 0x400 + +MF_BITMAP = 0x4 +MF_CHECKED = 0x8 +MF_DISABLED = 0x2 +MF_ENABLED = 0x0 +MF_GRAYED = 0x1 +MF_MENUBARBREAK = 0x20 +MF_MENUBREAK = 0x40 +MF_OWNERDRAW = 0x100 +MF_POPUP = 0x10 +MF_SEPARATOR = 0x800 +MF_STRING = 0x0 +MF_UNCHECKED = 0x0 + +InsertMenu = ctypes.windll.user32.InsertMenuW +InsertMenu.restype = ctypes.wintypes.BOOL +InsertMenu.argtypes = [ctypes.wintypes.HMENU, ctypes.wintypes.UINT, ctypes.wintypes.UINT, ctypes.wintypes.UINT, ctypes.wintypes.LPCWSTR] + +AppendMenu = 
ctypes.windll.user32.AppendMenuW +AppendMenu.restype = ctypes.wintypes.BOOL +AppendMenu.argtypes = [ctypes.wintypes.HMENU, ctypes.wintypes.UINT, ctypes.wintypes.UINT, ctypes.wintypes.LPCWSTR] + +SetMenuDefaultItem = ctypes.windll.user32.SetMenuDefaultItem +SetMenuDefaultItem.restype = ctypes.wintypes.BOOL +SetMenuDefaultItem.argtypes = [ctypes.wintypes.HMENU, ctypes.wintypes.UINT, ctypes.wintypes.UINT] + +#class MENUITEMINFO(ctypes.Structure): +# UINT cbSize; +# UINT fMask; +# UINT fType; +# UINT fState; +# UINT wID; +# HMENU hSubMenu; +# HBITMAP hbmpChecked; +# HBITMAP hbmpUnchecked; +# ULONG_PTR dwItemData; +# LPTSTR dwTypeData; +# UINT cch; +# HBITMAP hbmpItem; +# +#BOOL WINAPI InsertMenuItem( +# __in HMENU hMenu, +# __in UINT uItem, +# __in BOOL fByPosition, +# __in LPCMENUITEMINFO lpmii +#); +# + +class POINT(ctypes.Structure): + _fields_ = [ ('x', ctypes.wintypes.LONG), + ('y', ctypes.wintypes.LONG)] + +GetCursorPos = ctypes.windll.user32.GetCursorPos +GetCursorPos.argtypes = [ctypes.POINTER(POINT)] + +SetForegroundWindow = ctypes.windll.user32.SetForegroundWindow +SetForegroundWindow.argtypes = [ctypes.wintypes.HWND] + +TPM_LEFTALIGN = 0x0 +TPM_CENTERALIGN = 0x4 +TPM_RIGHTALIGN = 0x8 + +TPM_TOPALIGN = 0x0 +TPM_VCENTERALIGN = 0x10 +TPM_BOTTOMALIGN = 0x20 + +TPM_NONOTIFY = 0x80 +TPM_RETURNCMD = 0x100 + +TPM_LEFTBUTTON = 0x0 +TPM_RIGHTBUTTON = 0x2 + +TPM_HORNEGANIMATION = 0x800 +TPM_HORPOSANIMATION = 0x400 +TPM_NOANIMATION = 0x4000 +TPM_VERNEGANIMATION = 0x2000 +TPM_VERPOSANIMATION = 0x1000 + +TrackPopupMenu = ctypes.windll.user32.TrackPopupMenu +TrackPopupMenu.restype = ctypes.wintypes.BOOL +TrackPopupMenu.argtypes = [ctypes.wintypes.HMENU, ctypes.wintypes.UINT, ctypes.c_int, ctypes.c_int, ctypes.c_int, ctypes.wintypes.HWND, ctypes.c_void_p] + +PostMessage = ctypes.windll.user32.PostMessageW +PostMessage.restype = ctypes.wintypes.BOOL +PostMessage.argtypes = [ctypes.wintypes.HWND, ctypes.wintypes.UINT, ctypes.wintypes.WPARAM, ctypes.wintypes.LPARAM] + 
+DestroyMenu = ctypes.windll.user32.DestroyMenu +DestroyMenu.restype = ctypes.wintypes.BOOL +DestroyMenu.argtypes = [ctypes.wintypes.HMENU] + +# Create notification icon + +GUID = ctypes.c_ubyte * 16 + +class TimeoutVersionUnion(ctypes.Union): + _fields_ = [('uTimeout', ctypes.wintypes.UINT), + ('uVersion', ctypes.wintypes.UINT),] + +NIS_HIDDEN = 0x1 +NIS_SHAREDICON = 0x2 + +class NOTIFYICONDATA(ctypes.Structure): + def __init__(self, *args, **kwargs): + super(NOTIFYICONDATA, self).__init__(*args, **kwargs) + self.cbSize = ctypes.sizeof(self) + _fields_ = [ + ('cbSize', ctypes.wintypes.DWORD), + ('hWnd', ctypes.wintypes.HWND), + ('uID', ctypes.wintypes.UINT), + ('uFlags', ctypes.wintypes.UINT), + ('uCallbackMessage', ctypes.wintypes.UINT), + ('hIcon', ctypes.wintypes.HICON), + ('szTip', ctypes.wintypes.WCHAR * 64), + ('dwState', ctypes.wintypes.DWORD), + ('dwStateMask', ctypes.wintypes.DWORD), + ('szInfo', ctypes.wintypes.WCHAR * 256), + ('union', TimeoutVersionUnion), + ('szInfoTitle', ctypes.wintypes.WCHAR * 64), + ('dwInfoFlags', ctypes.wintypes.DWORD), + ('guidItem', GUID), + ('hBalloonIcon', ctypes.wintypes.HICON), + ] + +NIM_ADD = 0 +NIM_MODIFY = 1 +NIM_DELETE = 2 +NIM_SETFOCUS = 3 +NIM_SETVERSION = 4 + +NIF_MESSAGE = 1 +NIF_ICON = 2 +NIF_TIP = 4 +NIF_STATE = 8 +NIF_INFO = 16 +NIF_GUID = 32 +NIF_REALTIME = 64 +NIF_SHOWTIP = 128 + +NIIF_NONE = 0 +NIIF_INFO = 1 +NIIF_WARNING = 2 +NIIF_ERROR = 3 +NIIF_USER = 4 + +NOTIFYICON_VERSION = 3 +NOTIFYICON_VERSION_4 = 4 + +Shell_NotifyIcon = ctypes.windll.shell32.Shell_NotifyIconW +Shell_NotifyIcon.restype = ctypes.wintypes.BOOL +Shell_NotifyIcon.argtypes = [ctypes.wintypes.DWORD, ctypes.POINTER(NOTIFYICONDATA)] + +# Load icon/image + +IMAGE_BITMAP = 0 +IMAGE_ICON = 1 +IMAGE_CURSOR = 2 + +LR_CREATEDIBSECTION = 0x00002000 +LR_DEFAULTCOLOR = 0x00000000 +LR_DEFAULTSIZE = 0x00000040 +LR_LOADFROMFILE = 0x00000010 +LR_LOADMAP3DCOLORS = 0x00001000 +LR_LOADTRANSPARENT = 0x00000020 +LR_MONOCHROME = 0x00000001 +LR_SHARED = 
0x00008000 +LR_VGACOLOR = 0x00000080 + +OIC_SAMPLE = 32512 +OIC_HAND = 32513 +OIC_QUES = 32514 +OIC_BANG = 32515 +OIC_NOTE = 32516 +OIC_WINLOGO = 32517 +OIC_WARNING = OIC_BANG +OIC_ERROR = OIC_HAND +OIC_INFORMATION = OIC_NOTE + +LoadImage = ctypes.windll.user32.LoadImageW +LoadImage.restype = ctypes.wintypes.HANDLE +LoadImage.argtypes = [ctypes.wintypes.HINSTANCE, ctypes.wintypes.LPCWSTR, ctypes.wintypes.UINT, ctypes.c_int, ctypes.c_int, ctypes.wintypes.UINT] + +# CreateWindow call + +WNDPROC = ctypes.WINFUNCTYPE(ctypes.c_int, ctypes.wintypes.HWND, ctypes.c_uint, ctypes.wintypes.WPARAM, ctypes.wintypes.LPARAM) +DefWindowProc = ctypes.windll.user32.DefWindowProcW +DefWindowProc.restype = ctypes.c_int +DefWindowProc.argtypes = [ctypes.wintypes.HWND, ctypes.c_uint, ctypes.wintypes.WPARAM, ctypes.wintypes.LPARAM] + +WS_OVERLAPPED = 0x00000000L +WS_POPUP = 0x80000000L +WS_CHILD = 0x40000000L +WS_MINIMIZE = 0x20000000L +WS_VISIBLE = 0x10000000L +WS_DISABLED = 0x08000000L +WS_CLIPSIBLINGS = 0x04000000L +WS_CLIPCHILDREN = 0x02000000L +WS_MAXIMIZE = 0x01000000L +WS_CAPTION = 0x00C00000L +WS_BORDER = 0x00800000L +WS_DLGFRAME = 0x00400000L +WS_VSCROLL = 0x00200000L +WS_HSCROLL = 0x00100000L +WS_SYSMENU = 0x00080000L +WS_THICKFRAME = 0x00040000L +WS_GROUP = 0x00020000L +WS_TABSTOP = 0x00010000L + +WS_MINIMIZEBOX = 0x00020000L +WS_MAXIMIZEBOX = 0x00010000L + +WS_OVERLAPPEDWINDOW = (WS_OVERLAPPED | + WS_CAPTION | + WS_SYSMENU | + WS_THICKFRAME | + WS_MINIMIZEBOX | + WS_MAXIMIZEBOX) + +SM_XVIRTUALSCREEN = 76 +SM_YVIRTUALSCREEN = 77 +SM_CXVIRTUALSCREEN = 78 +SM_CYVIRTUALSCREEN = 79 +SM_CMONITORS = 80 +SM_SAMEDISPLAYFORMAT = 81 + +WM_NULL = 0x0000 +WM_CREATE = 0x0001 +WM_DESTROY = 0x0002 +WM_MOVE = 0x0003 +WM_SIZE = 0x0005 +WM_ACTIVATE = 0x0006 +WM_SETFOCUS = 0x0007 +WM_KILLFOCUS = 0x0008 +WM_ENABLE = 0x000A +WM_SETREDRAW = 0x000B +WM_SETTEXT = 0x000C +WM_GETTEXT = 0x000D +WM_GETTEXTLENGTH = 0x000E +WM_PAINT = 0x000F +WM_CLOSE = 0x0010 +WM_QUERYENDSESSION = 0x0011 +WM_QUIT = 0x0012 
+WM_QUERYOPEN = 0x0013 +WM_ERASEBKGND = 0x0014 +WM_SYSCOLORCHANGE = 0x0015 +WM_ENDSESSION = 0x0016 +WM_SHOWWINDOW = 0x0018 +WM_CTLCOLOR = 0x0019 +WM_WININICHANGE = 0x001A +WM_SETTINGCHANGE = 0x001A +WM_DEVMODECHANGE = 0x001B +WM_ACTIVATEAPP = 0x001C +WM_FONTCHANGE = 0x001D +WM_TIMECHANGE = 0x001E +WM_CANCELMODE = 0x001F +WM_SETCURSOR = 0x0020 +WM_MOUSEACTIVATE = 0x0021 +WM_CHILDACTIVATE = 0x0022 +WM_QUEUESYNC = 0x0023 +WM_GETMINMAXINFO = 0x0024 +WM_PAINTICON = 0x0026 +WM_ICONERASEBKGND = 0x0027 +WM_NEXTDLGCTL = 0x0028 +WM_SPOOLERSTATUS = 0x002A +WM_DRAWITEM = 0x002B +WM_MEASUREITEM = 0x002C +WM_DELETEITEM = 0x002D +WM_VKEYTOITEM = 0x002E +WM_CHARTOITEM = 0x002F +WM_SETFONT = 0x0030 +WM_GETFONT = 0x0031 +WM_SETHOTKEY = 0x0032 +WM_GETHOTKEY = 0x0033 +WM_QUERYDRAGICON = 0x0037 +WM_COMPAREITEM = 0x0039 +WM_GETOBJECT = 0x003D +WM_COMPACTING = 0x0041 +WM_COMMNOTIFY = 0x0044 +WM_WINDOWPOSCHANGING = 0x0046 +WM_WINDOWPOSCHANGED = 0x0047 +WM_POWER = 0x0048 +WM_COPYDATA = 0x004A +WM_CANCELJOURNAL = 0x004B +WM_NOTIFY = 0x004E +WM_INPUTLANGCHANGEREQUEST = 0x0050 +WM_INPUTLANGCHANGE = 0x0051 +WM_TCARD = 0x0052 +WM_HELP = 0x0053 +WM_USERCHANGED = 0x0054 +WM_NOTIFYFORMAT = 0x0055 +WM_CONTEXTMENU = 0x007B +WM_STYLECHANGING = 0x007C +WM_STYLECHANGED = 0x007D +WM_DISPLAYCHANGE = 0x007E +WM_GETICON = 0x007F +WM_SETICON = 0x0080 +WM_NCCREATE = 0x0081 +WM_NCDESTROY = 0x0082 +WM_NCCALCSIZE = 0x0083 +WM_NCHITTEST = 0x0084 +WM_NCPAINT = 0x0085 +WM_NCACTIVATE = 0x0086 +WM_GETDLGCODE = 0x0087 +WM_SYNCPAINT = 0x0088 +WM_NCMOUSEMOVE = 0x00A0 +WM_NCLBUTTONDOWN = 0x00A1 +WM_NCLBUTTONUP = 0x00A2 +WM_NCLBUTTONDBLCLK = 0x00A3 +WM_NCRBUTTONDOWN = 0x00A4 +WM_NCRBUTTONUP = 0x00A5 +WM_NCRBUTTONDBLCLK = 0x00A6 +WM_NCMBUTTONDOWN = 0x00A7 +WM_NCMBUTTONUP = 0x00A8 +WM_NCMBUTTONDBLCLK = 0x00A9 +WM_KEYDOWN = 0x0100 +WM_KEYUP = 0x0101 +WM_CHAR = 0x0102 +WM_DEADCHAR = 0x0103 +WM_SYSKEYDOWN = 0x0104 +WM_SYSKEYUP = 0x0105 +WM_SYSCHAR = 0x0106 +WM_SYSDEADCHAR = 0x0107 +WM_KEYLAST = 0x0108 +WM_IME_STARTCOMPOSITION 
= 0x010D +WM_IME_ENDCOMPOSITION = 0x010E +WM_IME_COMPOSITION = 0x010F +WM_IME_KEYLAST = 0x010F +WM_INITDIALOG = 0x0110 +WM_COMMAND = 0x0111 +WM_SYSCOMMAND = 0x0112 +WM_TIMER = 0x0113 +WM_HSCROLL = 0x0114 +WM_VSCROLL = 0x0115 +WM_INITMENU = 0x0116 +WM_INITMENUPOPUP = 0x0117 +WM_MENUSELECT = 0x011F +WM_MENUCHAR = 0x0120 +WM_ENTERIDLE = 0x0121 +WM_MENURBUTTONUP = 0x0122 +WM_MENUDRAG = 0x0123 +WM_MENUGETOBJECT = 0x0124 +WM_UNINITMENUPOPUP = 0x0125 +WM_MENUCOMMAND = 0x0126 +WM_CTLCOLORMSGBOX = 0x0132 +WM_CTLCOLOREDIT = 0x0133 +WM_CTLCOLORLISTBOX = 0x0134 +WM_CTLCOLORBTN = 0x0135 +WM_CTLCOLORDLG = 0x0136 +WM_CTLCOLORSCROLLBAR = 0x0137 +WM_CTLCOLORSTATIC = 0x0138 +WM_MOUSEMOVE = 0x0200 +WM_LBUTTONDOWN = 0x0201 +WM_LBUTTONUP = 0x0202 +WM_LBUTTONDBLCLK = 0x0203 +WM_RBUTTONDOWN = 0x0204 +WM_RBUTTONUP = 0x0205 +WM_RBUTTONDBLCLK = 0x0206 +WM_MBUTTONDOWN = 0x0207 +WM_MBUTTONUP = 0x0208 +WM_MBUTTONDBLCLK = 0x0209 +WM_MOUSEWHEEL = 0x020A +WM_PARENTNOTIFY = 0x0210 +WM_ENTERMENULOOP = 0x0211 +WM_EXITMENULOOP = 0x0212 +WM_NEXTMENU = 0x0213 +WM_SIZING = 0x0214 +WM_CAPTURECHANGED = 0x0215 +WM_MOVING = 0x0216 +WM_DEVICECHANGE = 0x0219 +WM_MDICREATE = 0x0220 +WM_MDIDESTROY = 0x0221 +WM_MDIACTIVATE = 0x0222 +WM_MDIRESTORE = 0x0223 +WM_MDINEXT = 0x0224 +WM_MDIMAXIMIZE = 0x0225 +WM_MDITILE = 0x0226 +WM_MDICASCADE = 0x0227 +WM_MDIICONARRANGE = 0x0228 +WM_MDIGETACTIVE = 0x0229 +WM_MDISETMENU = 0x0230 +WM_ENTERSIZEMOVE = 0x0231 +WM_EXITSIZEMOVE = 0x0232 +WM_DROPFILES = 0x0233 +WM_MDIREFRESHMENU = 0x0234 +WM_IME_SETCONTEXT = 0x0281 +WM_IME_NOTIFY = 0x0282 +WM_IME_CONTROL = 0x0283 +WM_IME_COMPOSITIONFULL = 0x0284 +WM_IME_SELECT = 0x0285 +WM_IME_CHAR = 0x0286 +WM_IME_REQUEST = 0x0288 +WM_IME_KEYDOWN = 0x0290 +WM_IME_KEYUP = 0x0291 +WM_MOUSEHOVER = 0x02A1 +WM_MOUSELEAVE = 0x02A3 +WM_CUT = 0x0300 +WM_COPY = 0x0301 +WM_PASTE = 0x0302 +WM_CLEAR = 0x0303 +WM_UNDO = 0x0304 +WM_RENDERFORMAT = 0x0305 +WM_RENDERALLFORMATS = 0x0306 +WM_DESTROYCLIPBOARD = 0x0307 +WM_DRAWCLIPBOARD = 0x0308 
+WM_PAINTCLIPBOARD = 0x0309 +WM_VSCROLLCLIPBOARD = 0x030A +WM_SIZECLIPBOARD = 0x030B +WM_ASKCBFORMATNAME = 0x030C +WM_CHANGECBCHAIN = 0x030D +WM_HSCROLLCLIPBOARD = 0x030E +WM_QUERYNEWPALETTE = 0x030F +WM_PALETTEISCHANGING = 0x0310 +WM_PALETTECHANGED = 0x0311 +WM_HOTKEY = 0x0312 +WM_PRINT = 0x0317 +WM_PRINTCLIENT = 0x0318 +WM_HANDHELDFIRST = 0x0358 +WM_HANDHELDLAST = 0x035F +WM_AFXFIRST = 0x0360 +WM_AFXLAST = 0x037F +WM_PENWINFIRST = 0x0380 +WM_PENWINLAST = 0x038F +WM_APP = 0x8000 +WM_USER = 0x0400 +WM_REFLECT = WM_USER + 0x1c00 + +class WNDCLASSEX(ctypes.Structure): + def __init__(self, *args, **kwargs): + super(WNDCLASSEX, self).__init__(*args, **kwargs) + self.cbSize = ctypes.sizeof(self) + _fields_ = [("cbSize", ctypes.c_uint), + ("style", ctypes.c_uint), + ("lpfnWndProc", WNDPROC), + ("cbClsExtra", ctypes.c_int), + ("cbWndExtra", ctypes.c_int), + ("hInstance", ctypes.wintypes.HANDLE), + ("hIcon", ctypes.wintypes.HANDLE), + ("hCursor", ctypes.wintypes.HANDLE), + ("hBrush", ctypes.wintypes.HANDLE), + ("lpszMenuName", ctypes.wintypes.LPCWSTR), + ("lpszClassName", ctypes.wintypes.LPCWSTR), + ("hIconSm", ctypes.wintypes.HANDLE)] + +UpdateWindow = ctypes.windll.user32.UpdateWindow +UpdateWindow.argtypes = [ctypes.wintypes.HWND] + +SW_HIDE = 0 +SW_SHOWNORMAL = 1 +SW_SHOW = 5 + +ShowWindow = ctypes.windll.user32.ShowWindow +ShowWindow.argtypes = [ctypes.wintypes.HWND, ctypes.c_int] + +CS_VREDRAW = 0x0001 +CS_HREDRAW = 0x0002 +CS_KEYCVTWINDOW = 0x0004 +CS_DBLCLKS = 0x0008 +CS_OWNDC = 0x0020 +CS_CLASSDC = 0x0040 +CS_PARENTDC = 0x0080 +CS_NOKEYCVT = 0x0100 +CS_NOCLOSE = 0x0200 +CS_SAVEBITS = 0x0800 +CS_BYTEALIGNCLIENT = 0x1000 +CS_BYTEALIGNWINDOW = 0x2000 +CS_GLOBALCLASS = 0x4000 + +COLOR_SCROLLBAR = 0 +COLOR_BACKGROUND = 1 +COLOR_ACTIVECAPTION = 2 +COLOR_INACTIVECAPTION = 3 +COLOR_MENU = 4 +COLOR_WINDOW = 5 +COLOR_WINDOWFRAME = 6 +COLOR_MENUTEXT = 7 +COLOR_WINDOWTEXT = 8 +COLOR_CAPTIONTEXT = 9 +COLOR_ACTIVEBORDER = 10 +COLOR_INACTIVEBORDER = 11 +COLOR_APPWORKSPACE = 12 
+COLOR_HIGHLIGHT = 13 +COLOR_HIGHLIGHTTEXT = 14 +COLOR_BTNFACE = 15 +COLOR_BTNSHADOW = 16 +COLOR_GRAYTEXT = 17 +COLOR_BTNTEXT = 18 +COLOR_INACTIVECAPTIONTEXT = 19 +COLOR_BTNHIGHLIGHT = 20 + +LoadCursor = ctypes.windll.user32.LoadCursorW + +def GenerateDummyWindow(callback, uid): + newclass = WNDCLASSEX() + newclass.lpfnWndProc = callback + newclass.style = CS_VREDRAW | CS_HREDRAW + newclass.lpszClassName = uid.replace("-", "") + newclass.hBrush = COLOR_BACKGROUND + newclass.hCursor = LoadCursor(0, 32512) + ATOM = ctypes.windll.user32.RegisterClassExW(ctypes.byref(newclass)) + #print "ATOM", ATOM + #print "CLASS", newclass.lpszClassName + hwnd = ctypes.windll.user32.CreateWindowExW(0, + newclass.lpszClassName, + u"Dummy Window", + WS_OVERLAPPEDWINDOW | WS_SYSMENU, + ctypes.windll.user32.GetSystemMetrics(SM_CXVIRTUALSCREEN), + ctypes.windll.user32.GetSystemMetrics(SM_CYVIRTUALSCREEN), + 800, 600, 0, 0, 0, 0) + ShowWindow(hwnd, SW_SHOW) + UpdateWindow(hwnd) + ShowWindow(hwnd, SW_HIDE) + return hwnd + +# Message loop calls + +TIMERCALLBACK = ctypes.WINFUNCTYPE(None, + ctypes.wintypes.HWND, + ctypes.wintypes.UINT, + ctypes.POINTER(ctypes.wintypes.UINT), + ctypes.wintypes.DWORD) + +SetTimer = ctypes.windll.user32.SetTimer +SetTimer.restype = ctypes.POINTER(ctypes.wintypes.UINT) +SetTimer.argtypes = [ctypes.wintypes.HWND, + ctypes.POINTER(ctypes.wintypes.UINT), + ctypes.wintypes.UINT, + TIMERCALLBACK] + +KillTimer = ctypes.windll.user32.KillTimer +KillTimer.restype = ctypes.wintypes.BOOL +KillTimer.argtypes = [ctypes.wintypes.HWND, + ctypes.POINTER(ctypes.wintypes.UINT)] + +class MSG(ctypes.Structure): + _fields_ = [ ('HWND', ctypes.wintypes.HWND), + ('message', ctypes.wintypes.UINT), + ('wParam', ctypes.wintypes.WPARAM), + ('lParam', ctypes.wintypes.LPARAM), + ('time', ctypes.wintypes.DWORD), + ('pt', POINT)] + +GetMessage = ctypes.windll.user32.GetMessageW +GetMessage.restype = ctypes.wintypes.BOOL +GetMessage.argtypes = [ctypes.POINTER(MSG), ctypes.wintypes.HWND, 
ctypes.wintypes.UINT, ctypes.wintypes.UINT] + +TranslateMessage = ctypes.windll.user32.TranslateMessage +TranslateMessage.restype = ctypes.wintypes.ULONG +TranslateMessage.argtypes = [ctypes.POINTER(MSG)] + +DispatchMessage = ctypes.windll.user32.DispatchMessageW +DispatchMessage.restype = ctypes.wintypes.ULONG +DispatchMessage.argtypes = [ctypes.POINTER(MSG)] + +def LoadIcon(iconfilename, small=False): + return LoadImage(0, + unicode(iconfilename), + IMAGE_ICON, + 16 if small else 0, + 16 if small else 0, + LR_LOADFROMFILE) + + +class NotificationIcon(object): + def __init__(self, iconfilename, tooltip=None): + assert os.path.isfile(unicode(iconfilename)), "{} doesn't exist".format(iconfilename) + self._iconfile = unicode(iconfilename) + self._hicon = LoadIcon(self._iconfile, True) + assert self._hicon, "Failed to load {}".format(iconfilename) + #self._pumpqueue = Queue.Queue() + self._die = False + self._timerid = None + self._uid = uuid.uuid4() + self._tooltip = unicode(tooltip) if tooltip else u'' + #self._thread = threading.Thread(target=self._run) + #self._thread.start() + self._info_bubble = None + self.items = [] + + + def _bubble(self, iconinfo): + if self._info_bubble: + info_bubble = self._info_bubble + self._info_bubble = None + message = unicode(self._info_bubble) + iconinfo.uFlags |= NIF_INFO + iconinfo.szInfo = message + iconinfo.szInfoTitle = message + iconinfo.dwInfoFlags = NIIF_INFO + iconinfo.union.uTimeout = 10000 + Shell_NotifyIcon(NIM_MODIFY, ctypes.pointer(iconinfo)) + + + def _run(self): + self._windowproc = WNDPROC(self._callback) + self._hwnd = GenerateDummyWindow(self._windowproc, str(self._uid)) + + iconinfo = NOTIFYICONDATA() + iconinfo.hWnd = self._hwnd + iconinfo.uID = 100 + iconinfo.uFlags = NIF_ICON | NIF_SHOWTIP | NIF_MESSAGE | (NIF_TIP if self._tooltip else 0) + iconinfo.uCallbackMessage = WM_MENUCOMMAND + iconinfo.hIcon = self._hicon + iconinfo.szTip = self._tooltip + iconinfo.dwState = NIS_SHAREDICON + iconinfo.dwInfoFlags = 
NIIF_INFO + # iconinfo.dwStateMask = NIS_SHAREDICON + iconinfo.szInfo = "Application Title" + iconinfo.union.uTimeout = 5000 + + Shell_NotifyIcon(NIM_ADD, ctypes.pointer(iconinfo)) + + iconinfo.union.uVersion = NOTIFYICON_VERSION + Shell_NotifyIcon(NIM_SETVERSION, ctypes.pointer(iconinfo)) + self.iconinfo = iconinfo + + PostMessage(self._hwnd, WM_NULL, 0, 0) + + #self._timerid = SetTimer(self._hwnd, self._timerid, 25, TIMERCALLBACK()) + message = MSG() + last_time = -1 + ret = None + while not self._die: + try: + ret = GetMessage(ctypes.pointer(message), 0, 0, 0) + TranslateMessage(ctypes.pointer(message)) + DispatchMessage(ctypes.pointer(message)) + except Exception, err: + # print "NotificationIcon error", err, message + message = MSG() + time.sleep(0.125) + print "Icon thread stopped, removing icon..." + #KillTimer(self._hwnd, self._timerid) + + Shell_NotifyIcon(NIM_DELETE, ctypes.cast(ctypes.pointer(iconinfo), ctypes.POINTER(NOTIFYICONDATA))) + ctypes.windll.user32.DestroyWindow(self._hwnd) + ctypes.windll.user32.DestroyIcon(self._hicon) + + + def _menu(self): + if not hasattr(self, 'items'): + return + + menu = CreatePopupMenu() + func = None + + try: + iidx = 1000 + defaultitem = -1 + item_map = {} + for fs in self.items: + iidx += 1 + if isinstance(fs, basestring): + if fs and not fs.strip('-_='): + AppendMenu(menu, MF_SEPARATOR, iidx, fs) + else: + AppendMenu(menu, MF_STRING | MF_GRAYED, iidx, fs) + elif isinstance(fs, tuple): + if callable(fs[0]): + itemstring = fs[0]() + else: + itemstring = unicode(fs[0]) + flags = MF_STRING + if itemstring.startswith("!"): + itemstring = itemstring[1:] + defaultitem = iidx + if itemstring.startswith("+"): + itemstring = itemstring[1:] + flags = flags | MF_CHECKED + itemcallable = fs[1] + item_map[iidx] = itemcallable + if itemcallable is False: + flags = flags | MF_DISABLED + elif not callable(itemcallable): + flags = flags | MF_GRAYED + AppendMenu(menu, flags, iidx, itemstring) + + if defaultitem != -1: + 
SetMenuDefaultItem(menu, defaultitem, 0) + + pos = POINT() + GetCursorPos(ctypes.pointer(pos)) + + PostMessage(self._hwnd, WM_NULL, 0, 0) + + SetForegroundWindow(self._hwnd) + + ti = TrackPopupMenu(menu, TPM_RIGHTBUTTON | TPM_RETURNCMD | TPM_NONOTIFY, pos.x, pos.y, 0, self._hwnd, None) + + if ti in item_map: + func = item_map[ti] + + PostMessage(self._hwnd, WM_NULL, 0, 0) + finally: + DestroyMenu(menu) + if func: func() + + + def clicked(self): + self._menu() + + + + def _callback(self, hWnd, msg, wParam, lParam): + # Check if the main thread is still alive + if msg == WM_TIMER: + if not any(thread.getName() == 'MainThread' and thread.isAlive() + for thread in threading.enumerate()): + self._die = True + elif msg == WM_MENUCOMMAND and lParam == WM_LBUTTONUP: + self.clicked() + elif msg == WM_MENUCOMMAND and lParam == WM_RBUTTONUP: + self._menu() + else: + return DefWindowProc(hWnd, msg, wParam, lParam) + return 1 + + + def die(self): + self._die = True + PostMessage(self._hwnd, WM_NULL, 0, 0) + time.sleep(0.2) + try: + Shell_NotifyIcon(NIM_DELETE, self.iconinfo) + except Exception, err: + print "Icon remove error", err + ctypes.windll.user32.DestroyWindow(self._hwnd) + ctypes.windll.user32.DestroyIcon(self._hicon) + + + def pump(self): + try: + while not self._pumpqueue.empty(): + callable = self._pumpqueue.get(False) + callable() + except Queue.Empty: + pass + + + def announce(self, text): + self._info_bubble = text + + +def hideConsole(): + ctypes.windll.user32.ShowWindow(ctypes.windll.kernel32.GetConsoleWindow(), 0) + +def showConsole(): + ctypes.windll.user32.ShowWindow(ctypes.windll.kernel32.GetConsoleWindow(), 1) + +if __name__ == "__main__": + import time + def greet(): + ctypes.windll.user32.ShowWindow(ctypes.windll.kernel32.GetConsoleWindow(), 0) + print "Hello" + def quit(): + ni._die = True + #sys.exit() + def announce(): + ctypes.windll.user32.ShowWindow(ctypes.windll.kernel32.GetConsoleWindow(), 1) + ni.announce("Hello there") + + def clicked(): + 
ni.announce("Hello") + + def dynamicTitle(): + return "!The time is: %s" % time.time() + + ni = NotificationIcon(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../trayicon.ico'), "ZeroNet 0.2.9") + ni.items = [ + (dynamicTitle, False), + ('Hello', greet), + ('Title', False), + ('!Default', greet), + ('+Popup bubble', announce), + 'Nothing', + '--', + ('Quit', quit) + ] + ni.clicked = clicked + import atexit + @atexit.register + def goodbye(): + print "You are now leaving the Python sector." + + ni._run() diff --git a/plugins/Trayicon/lib/winfolders.py b/plugins/Trayicon/lib/winfolders.py new file mode 100644 index 00000000..d28efc1a --- /dev/null +++ b/plugins/Trayicon/lib/winfolders.py @@ -0,0 +1,53 @@ +''' Get windows special folders without pythonwin + Example: + import specialfolders + start_programs = specialfolders.get(specialfolders.PROGRAMS) + +Code is public domain, do with it what you will. + +Luke Pinner - Environment.gov.au, 2010 February 10 +''' + +#Imports use _syntax to mask them from autocomplete IDE's +import ctypes as _ctypes +from ctypes.wintypes import HWND as _HWND, HANDLE as _HANDLE,DWORD as _DWORD,LPCWSTR as _LPCWSTR,MAX_PATH as _MAX_PATH, create_unicode_buffer as _cub +_SHGetFolderPath = _ctypes.windll.shell32.SHGetFolderPathW + +#public special folder constants +DESKTOP= 0 +PROGRAMS= 2 +MYDOCUMENTS= 5 +FAVORITES= 6 +STARTUP= 7 +RECENT= 8 +SENDTO= 9 +STARTMENU= 11 +MYMUSIC= 13 +MYVIDEOS= 14 +NETHOOD= 19 +FONTS= 20 +TEMPLATES= 21 +ALLUSERSSTARTMENU= 22 +ALLUSERSPROGRAMS= 23 +ALLUSERSSTARTUP= 24 +ALLUSERSDESKTOP= 25 +APPLICATIONDATA= 26 +PRINTHOOD= 27 +LOCALSETTINGSAPPLICATIONDATA= 28 +ALLUSERSFAVORITES= 31 +LOCALSETTINGSTEMPORARYINTERNETFILES=32 +COOKIES= 33 +LOCALSETTINGSHISTORY= 34 +ALLUSERSAPPLICATIONDATA= 35 + +def get(intFolder): + _SHGetFolderPath.argtypes = [_HWND, _ctypes.c_int, _HANDLE, _DWORD, _LPCWSTR] + auPathBuffer = _cub(_MAX_PATH) + exit_code=_SHGetFolderPath(0, intFolder, 0, 0, auPathBuffer) + return 
auPathBuffer.value + + +if __name__ == "__main__": + import os + print get(STARTUP) + open(get(STARTUP)+"\\zeronet.cmd", "w").write("cd /D %s\r\nzeronet.py" % os.getcwd()) \ No newline at end of file diff --git a/plugins/Trayicon/trayicon.ico b/plugins/Trayicon/trayicon.ico new file mode 100644 index 00000000..fad67073 Binary files /dev/null and b/plugins/Trayicon/trayicon.ico differ diff --git a/plugins/Zeroname/SiteManagerPlugin.py b/plugins/Zeroname/SiteManagerPlugin.py new file mode 100644 index 00000000..d2b82c37 --- /dev/null +++ b/plugins/Zeroname/SiteManagerPlugin.py @@ -0,0 +1,71 @@ +import logging +import re + +from Plugin import PluginManager + +allow_reload = False # No reload supported + +log = logging.getLogger("ZeronamePlugin") + + +@PluginManager.registerTo("SiteManager") +class SiteManagerPlugin(object): + zeroname_address = "1Name2NXVi1RDPDgf5617UoW7xA6YrhM9F" + site_zeroname = None + + def load(self): + super(SiteManagerPlugin, self).load() + if not self.get(self.zeroname_address): + self.need(self.zeroname_address) # Need ZeroName site + + # Checks if its a valid address + def isAddress(self, address): + if self.isDomain(address): + return True + else: + return super(SiteManagerPlugin, self).isAddress(address) + + # Return: True if the address is domain + def isDomain(self, address): + return re.match("(.*?)([A-Za-z0-9_-]+\.[A-Za-z0-9]+)$", address) + + # Resolve domain + # Return: The address or None + def resolveDomain(self, domain): + domain = domain.lower() + if not self.site_zeroname: + self.site_zeroname = self.need(self.zeroname_address) + self.site_zeroname.needFile("data/names.json", priority=10) + db = self.site_zeroname.storage.loadJson("data/names.json") + return db.get(domain) + + # Return or create site and start download site files + # Return: Site or None if dns resolve failed + def need(self, address, all_file=True): + if self.isDomain(address): # Its looks like a domain + address_resolved = self.resolveDomain(address) + if 
address_resolved: + address = address_resolved + else: + return None + + return super(SiteManagerPlugin, self).need(address, all_file) + + # Return: Site object or None if not found + def get(self, address): + if self.sites is None: # Not loaded yet + self.load() + if self.isDomain(address): # Its looks like a domain + address_resolved = self.resolveDomain(address) + if address_resolved: # Domain found + site = self.sites.get(address_resolved) + if site: + site_domain = site.settings.get("domain") + if site_domain != address: + site.settings["domain"] = address + else: # Domain not found + site = self.sites.get(address) + + else: # Access by site address + site = self.sites.get(address) + return site diff --git a/plugins/Zeroname/UiRequestPlugin.py b/plugins/Zeroname/UiRequestPlugin.py new file mode 100644 index 00000000..d080a312 --- /dev/null +++ b/plugins/Zeroname/UiRequestPlugin.py @@ -0,0 +1,42 @@ +import re +from Plugin import PluginManager + + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + + def __init__(self, *args, **kwargs): + from Site import SiteManager + self.site_manager = SiteManager.site_manager + super(UiRequestPlugin, self).__init__(*args, **kwargs) + + # Media request + def actionSiteMedia(self, path): + match = re.match("/media/(?P
    [A-Za-z0-9]+\.[A-Za-z0-9\.]+)(?P/.*|$)", path) + if match: # Its a valid domain, resolve first + domain = match.group("address") + address = self.site_manager.resolveDomain(domain) + if address: + path = "/media/" + address + match.group("inner_path") + return super(UiRequestPlugin, self).actionSiteMedia(path) # Get the wrapper frame output + + # Is mediarequest allowed from that referer + def isMediaRequestAllowed(self, site_address, referer): + referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address + referer_path = re.sub("\?.*", "", referer_path) # Remove http params + + if not re.sub("^http[s]{0,1}://", "", referer).startswith(self.env["HTTP_HOST"]): # Different origin + return False + + if self.isProxyRequest(): # Match to site domain + referer = re.sub("^http://zero[/]+", "http://", referer) # Allow /zero access + referer_site_address = re.match("http[s]{0,1}://(.*?)(/|$)", referer).group(1) + else: # Match to request path + referer_site_address = re.match("/(?P
    [A-Za-z0-9\.]+)(?P/.*|$)", referer_path).group("address") + + if referer_site_address == site_address: # Referer site address as simple address + return True + elif self.site_manager.resolveDomain(referer_site_address) == site_address: # Referer site address as dns + return True + else: # Invalid referer + return False diff --git a/plugins/Zeroname/__init__.py b/plugins/Zeroname/__init__.py new file mode 100644 index 00000000..889802db --- /dev/null +++ b/plugins/Zeroname/__init__.py @@ -0,0 +1,2 @@ +import UiRequestPlugin +import SiteManagerPlugin \ No newline at end of file diff --git a/plugins/Zeroname/updater/zeroname_updater.py b/plugins/Zeroname/updater/zeroname_updater.py new file mode 100644 index 00000000..591e9bff --- /dev/null +++ b/plugins/Zeroname/updater/zeroname_updater.py @@ -0,0 +1,159 @@ +import time +import json +import os +import sys +import re +import socket + +from bitcoinrpc.authproxy import AuthServiceProxy + + +def publish(): + print "* Signing..." + os.system("python zeronet.py siteSign %s %s" % (config["site"], config["privatekey"])) + print "* Publishing..." 
+ os.system("python zeronet.py sitePublish %s" % config["site"]) + + +def processNameOp(domain, value): + if not value.startswith("{"): + return False + try: + data = json.loads(value) + except Exception, err: + print "Json load error: %s" % err + return False + if "zeronet" not in data: + print "No zeronet in ", data.keys() + return False + if not isinstance(data["zeronet"], dict): + print "Not dict: ", data["zeronet"] + return False + if not re.match("^[a-z0-9]([a-z0-9-]{0,62}[a-z0-9])?$", domain): + print "Invalid domain: ", domain + return False + + if "slave" in sys.argv: + print "Waiting for master update arrive" + time.sleep(30) # Wait 30 sec to allow master updater + + # Note: Requires the file data/names.json to exist and contain "{}" to work + names_raw = open(names_path, "rb").read() + names = json.loads(names_raw) + for subdomain, address in data["zeronet"].items(): + subdomain = subdomain.lower() + address = re.sub("[^A-Za-z0-9]", "", address) + print subdomain, domain, "->", address + if subdomain: + if re.match("^[a-z0-9]([a-z0-9-]{0,62}[a-z0-9])?$", subdomain): + names["%s.%s.bit" % (subdomain, domain)] = address + else: + print "Invalid subdomain:", domain, subdomain + else: + names["%s.bit" % domain] = address + + new_names_raw = json.dumps(names, indent=2, sort_keys=True) + if new_names_raw != names_raw: + open(names_path, "wb").write(new_names_raw) + return True + else: + print "names not changed" + return False + + +def processBlock(block_id): + print "Processing block #%s..." 
% block_id + s = time.time() + block_hash = rpc.getblockhash(block_id) + block = rpc.getblock(block_hash) + + print "Checking %s tx" % len(block["tx"]) + updated = 0 + for tx in block["tx"]: + try: + transaction = rpc.getrawtransaction(tx, 1) + for vout in transaction.get("vout", []): + if "scriptPubKey" in vout and "nameOp" in vout["scriptPubKey"] and "name" in vout["scriptPubKey"]["nameOp"]: + name_op = vout["scriptPubKey"]["nameOp"] + updated += processNameOp(name_op["name"].replace("d/", ""), name_op["value"]) + except Exception, err: + print "Error processing tx #%s %s" % (tx, err) + print "Done in %.3fs (updated %s)." % (time.time() - s, updated) + if updated: + publish() + + +# Loading config... + +# Check whether platform is on windows or linux +# On linux namecoin is installed under ~/.namecoin, while on on windows it is in %appdata%/Namecoin + +if sys.platform == "win32": + namecoin_location = os.getenv('APPDATA') + "/Namecoin/" +else: + namecoin_location = os.path.expanduser("~/.namecoin/") + +config_path = namecoin_location + 'zeroname_config.json' +if not os.path.isfile(config_path): # Create sample config + open(config_path, "w").write( + json.dumps({'site': 'site', 'zeronet_path': '/home/zeronet/', 'privatekey': '', 'lastprocessed': 223911}, indent=2) + ) + print "Example config written to %s" % config_path + sys.exit(0) + +config = json.load(open(config_path)) +names_path = "%s/data/%s/data/names.json" % (config["zeronet_path"], config["site"]) +os.chdir(config["zeronet_path"]) # Change working dir - tells script where Zeronet install is. 
+ +# Getting rpc connect details +namecoin_conf = open(namecoin_location + "namecoin.conf").read() + +# Connecting to RPC +rpc_user = re.search("rpcuser=(.*)$", namecoin_conf, re.M).group(1) +rpc_pass = re.search("rpcpassword=(.*)$", namecoin_conf, re.M).group(1) +rpc_url = "http://%s:%s@127.0.0.1:8336" % (rpc_user, rpc_pass) + +rpc = AuthServiceProxy(rpc_url, timeout=60 * 5) + +last_block = int(rpc.getinfo()["blocks"]) + +if not config["lastprocessed"]: # Start processing from last block + config["lastprocessed"] = last_block + +# Processing skipped blocks +print "Processing block from #%s to #%s..." % (config["lastprocessed"], last_block) +for block_id in range(config["lastprocessed"], last_block + 1): + processBlock(block_id) + +# processBlock(223911) # Testing zeronetwork.bit +# processBlock(227052) # Testing brainwallets.bit +# processBlock(236824) # Utf8 domain name (invalid should skip) +# processBlock(236752) # Uppercase domain (invalid should skip) +# processBlock(236870) # Encoded domain (should pass) +# sys.exit(0) + +while 1: + print "Waiting for new block", + sys.stdout.flush() + while 1: + try: + rpc = AuthServiceProxy(rpc_url, timeout=60 * 5) + if (int(rpc.getinfo()["blocks"]) > last_block): + break + time.sleep(1) + rpc.waitforblock() + print "Found" + break # Block found + except socket.timeout: # Timeout + print ".", + sys.stdout.flush() + except Exception, err: + print "Exception", err.__class__, err + time.sleep(5) + + last_block = int(rpc.getinfo()["blocks"]) + for block_id in range(config["lastprocessed"] + 1, last_block + 1): + processBlock(block_id) + + config["lastprocessed"] = last_block + open(config_path, "w").write(json.dumps(config, indent=1)) diff --git a/plugins/disabled-Dnschain/SiteManagerPlugin.py b/plugins/disabled-Dnschain/SiteManagerPlugin.py new file mode 100644 index 00000000..9121b425 --- /dev/null +++ b/plugins/disabled-Dnschain/SiteManagerPlugin.py @@ -0,0 +1,153 @@ +import logging, json, os, re, sys, time +import gevent 
+from Plugin import PluginManager +from Config import config +from util import Http +from Debug import Debug + +allow_reload = False # No reload supported + +log = logging.getLogger("DnschainPlugin") + +@PluginManager.registerTo("SiteManager") +class SiteManagerPlugin(object): + dns_cache_path = "%s/dns_cache.json" % config.data_dir + dns_cache = None + + # Checks if its a valid address + def isAddress(self, address): + if self.isDomain(address): + return True + else: + return super(SiteManagerPlugin, self).isAddress(address) + + + # Return: True if the address is domain + def isDomain(self, address): + return re.match("(.*?)([A-Za-z0-9_-]+\.[A-Za-z0-9]+)$", address) + + + # Load dns entries from data/dns_cache.json + def loadDnsCache(self): + if os.path.isfile(self.dns_cache_path): + self.dns_cache = json.load(open(self.dns_cache_path)) + else: + self.dns_cache = {} + log.debug("Loaded dns cache, entries: %s" % len(self.dns_cache)) + + + # Save dns entries to data/dns_cache.json + def saveDnsCache(self): + json.dump(self.dns_cache, open(self.dns_cache_path, "wb"), indent=2) + + + # Resolve domain using dnschain.net + # Return: The address or None + def resolveDomainDnschainNet(self, domain): + try: + match = self.isDomain(domain) + sub_domain = match.group(1).strip(".") + top_domain = match.group(2) + if not sub_domain: sub_domain = "@" + address = None + with gevent.Timeout(5, Exception("Timeout: 5s")): + res = Http.get("https://api.dnschain.net/v1/namecoin/key/%s" % top_domain).read() + data = json.loads(res)["data"]["value"] + if "zeronet" in data: + for key, val in data["zeronet"].iteritems(): + self.dns_cache[key+"."+top_domain] = [val, time.time()+60*60*5] # Cache for 5 hours + self.saveDnsCache() + return data["zeronet"].get(sub_domain) + # Not found + return address + except Exception, err: + log.debug("Dnschain.net %s resolve error: %s" % (domain, Debug.formatException(err))) + + + # Resolve domain using dnschain.info + # Return: The address or None + def 
resolveDomainDnschainInfo(self, domain): + try: + match = self.isDomain(domain) + sub_domain = match.group(1).strip(".") + top_domain = match.group(2) + if not sub_domain: sub_domain = "@" + address = None + with gevent.Timeout(5, Exception("Timeout: 5s")): + res = Http.get("https://dnschain.info/bit/d/%s" % re.sub("\.bit$", "", top_domain)).read() + data = json.loads(res)["value"] + for key, val in data["zeronet"].iteritems(): + self.dns_cache[key+"."+top_domain] = [val, time.time()+60*60*5] # Cache for 5 hours + self.saveDnsCache() + return data["zeronet"].get(sub_domain) + # Not found + return address + except Exception, err: + log.debug("Dnschain.info %s resolve error: %s" % (domain, Debug.formatException(err))) + + + # Resolve domain + # Return: The address or None + def resolveDomain(self, domain): + domain = domain.lower() + if self.dns_cache == None: + self.loadDnsCache() + if domain.count(".") < 2: # Its a topleved request, prepend @. to it + domain = "@."+domain + + domain_details = self.dns_cache.get(domain) + if domain_details and time.time() < domain_details[1]: # Found in cache and its not expired + return domain_details[0] + else: + # Resovle dns using dnschain + thread_dnschain_info = gevent.spawn(self.resolveDomainDnschainInfo, domain) + thread_dnschain_net = gevent.spawn(self.resolveDomainDnschainNet, domain) + gevent.joinall([thread_dnschain_net, thread_dnschain_info]) # Wait for finish + + if thread_dnschain_info.value and thread_dnschain_net.value: # Booth successfull + if thread_dnschain_info.value == thread_dnschain_net.value: # Same returned value + return thread_dnschain_info.value + else: + log.error("Dns %s missmatch: %s != %s" % (domain, thread_dnschain_info.value, thread_dnschain_net.value)) + + # Problem during resolve + if domain_details: # Resolve failed, but we have it in the cache + domain_details[1] = time.time()+60*60 # Dont try again for 1 hour + return domain_details[0] + else: # Not found in cache + self.dns_cache[domain] = 
[None, time.time()+60] # Don't check again for 1 min + return None + + + # Return or create site and start download site files + # Return: Site or None if dns resolve failed + def need(self, address, all_file=True): + if self.isDomain(address): # Its looks like a domain + address_resolved = self.resolveDomain(address) + if address_resolved: + address = address_resolved + else: + return None + + return super(SiteManagerPlugin, self).need(address, all_file) + + + # Return: Site object or None if not found + def get(self, address): + if self.sites == None: # Not loaded yet + self.load() + if self.isDomain(address): # Its looks like a domain + address_resolved = self.resolveDomain(address) + if address_resolved: # Domain found + site = self.sites.get(address_resolved) + if site: + site_domain = site.settings.get("domain") + if site_domain != address: + site.settings["domain"] = address + else: # Domain not found + site = self.sites.get(address) + + else: # Access by site address + site = self.sites.get(address) + return site + diff --git a/plugins/disabled-Dnschain/UiRequestPlugin.py b/plugins/disabled-Dnschain/UiRequestPlugin.py new file mode 100644 index 00000000..65a386f1 --- /dev/null +++ b/plugins/disabled-Dnschain/UiRequestPlugin.py @@ -0,0 +1,34 @@ +import re +from Plugin import PluginManager + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + def __init__(self, server = None): + from Site import SiteManager + self.site_manager = SiteManager.site_manager + super(UiRequestPlugin, self).__init__(server) + + + # Media request + def actionSiteMedia(self, path): + match = re.match("/media/(?P
    [A-Za-z0-9]+\.[A-Za-z0-9\.]+)(?P/.*|$)", path) + if match: # Its a valid domain, resolve first + domain = match.group("address") + address = self.site_manager.resolveDomain(domain) + if address: + path = "/media/"+address+match.group("inner_path") + return super(UiRequestPlugin, self).actionSiteMedia(path) # Get the wrapper frame output + + + # Is mediarequest allowed from that referer + def isMediaRequestAllowed(self, site_address, referer): + referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address + referer_site_address = re.match("/(?P
    [A-Za-z0-9\.]+)(?P/.*|$)", referer_path).group("address") + + if referer_site_address == site_address: # Referer site address as simple address + return True + elif self.site_manager.resolveDomain(referer_site_address) == site_address: # Referer site address as dns + return True + else: # Invalid referer + return False + diff --git a/plugins/disabled-Dnschain/__init__.py b/plugins/disabled-Dnschain/__init__.py new file mode 100644 index 00000000..2b36af5d --- /dev/null +++ b/plugins/disabled-Dnschain/__init__.py @@ -0,0 +1,3 @@ +# This plugin is experimental, if you really want to enable uncomment the following lines: +# import DnschainPlugin +# import SiteManagerPlugin \ No newline at end of file diff --git a/plugins/disabled-DonationMessage/DonationMessagePlugin.py b/plugins/disabled-DonationMessage/DonationMessagePlugin.py new file mode 100644 index 00000000..00be03f1 --- /dev/null +++ b/plugins/disabled-DonationMessage/DonationMessagePlugin.py @@ -0,0 +1,24 @@ +import re +from Plugin import PluginManager + +# Warning: If you modify the donation address then renmae the plugin's directory to "MyDonationMessage" to prevent the update script overwrite + + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + # Inject a donation message to every page top right corner + def actionWrapper(self, path): + back = super(UiRequestPlugin, self).actionWrapper(path) + if not back or not hasattr(back, "endswith"): return back # Wrapper error or not string returned, injection not possible + + back = re.sub("\s*\s*$", + """ + + Please donate to help to keep this ZeroProxy alive + + + """, back) + + return back diff --git a/plugins/disabled-DonationMessage/__init__.py b/plugins/disabled-DonationMessage/__init__.py new file mode 100644 index 00000000..f8dcae2f --- /dev/null +++ b/plugins/disabled-DonationMessage/__init__.py @@ -0,0 +1 @@ +import DonationMessagePlugin diff --git a/plugins/disabled-Multiuser/MultiuserPlugin.py 
b/plugins/disabled-Multiuser/MultiuserPlugin.py new file mode 100644 index 00000000..9d65fac8 --- /dev/null +++ b/plugins/disabled-Multiuser/MultiuserPlugin.py @@ -0,0 +1,171 @@ +import re +import sys +from Plugin import PluginManager + + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + def __init__(self, *args, **kwargs): + self.user_manager = sys.modules["User.UserManager"].user_manager + super(UiRequestPlugin, self).__init__(*args, **kwargs) + + # Create new user and inject user welcome message if necessary + # Return: Html body also containing the injection + def actionWrapper(self, path, extra_headers=None): + + match = re.match("/(?P
    [A-Za-z0-9\._-]+)(?P/.*|$)", path) + if not match: + return False + inner_path = match.group("inner_path").lstrip("/") + html_request = "." not in inner_path or inner_path.endswith(".html") # Only inject html to html requests + + user_created = False + if html_request: + user = self.getCurrentUser() # Get user from cookie + if not user: # No user found by cookie + user = self.user_manager.create() + user_created = True + + if user_created: + if not extra_headers: + extra_headers = [] + extra_headers.append(('Set-Cookie', "master_address=%s;path=/;max-age=2592000;" % user.master_address)) # = 30 days + + loggedin = self.get.get("login") == "done" + + back_generator = super(UiRequestPlugin, self).actionWrapper(path, extra_headers) # Get the wrapper frame output + + if not back_generator: # Wrapper error or not string returned, injection not possible + return False + + if user_created: + back = back_generator.next() + master_seed = user.master_seed + # Inject the welcome message + inject_html = """ + + + + + + """.replace("\t", "") + inject_html = inject_html.replace("{master_seed}", master_seed) # Set the master seed in the message + + return iter([re.sub("\s*\s*$", inject_html, back)]) # Replace the tags with the injection + + elif loggedin: + back = back_generator.next() + inject_html = """ + + + + + """.replace("\t", "") + return iter([re.sub("\s*\s*$", inject_html, back)]) # Replace the tags with the injection + + else: # No injection necessary + return back_generator + + # Get the current user based on request's cookies + # Return: User object or None if no match + def getCurrentUser(self): + cookies = self.getCookies() + user = None + if "master_address" in cookies: + users = self.user_manager.list() + user = users.get(cookies["master_address"]) + return user + + +@PluginManager.registerTo("UserManager") +class UserManagerPlugin(object): + # In multiuser mode do not load the users + def load(self): + if not self.users: + self.users = {} + return self.users 
+ + # Find user by master address + # Return: User or None + def get(self, master_address=None): + users = self.list() + if master_address in users: + user = users[master_address] + else: + user = None + return user + + +@PluginManager.registerTo("User") +class UserPlugin(object): + # In multiuser mode users data only exits in memory, dont write to data/user.json + def save(self): + return False + + +@PluginManager.registerTo("UiWebsocket") +class UiWebsocketPlugin(object): + # Let the page know we running in multiuser mode + def formatServerInfo(self): + server_info = super(UiWebsocketPlugin, self).formatServerInfo() + server_info["multiuser"] = True + if "ADMIN" in self.site.settings["permissions"]: + server_info["master_address"] = self.user.master_address + return server_info + + # Show current user's master seed + def actionUserShowMasterSeed(self, to): + if "ADMIN" not in self.site.settings["permissions"]: + return self.response(to, "Show master seed not allowed") + message = "Your unique private key:" + message += "
    %s
    " % self.user.master_seed + message += "(Save it, you can access your account using this information)" + self.cmd("notification", ["info", message]) + + # Logout user + def actionUserLogout(self, to): + if "ADMIN" not in self.site.settings["permissions"]: + return self.response(to, "Logout not allowed") + message = "You have been logged out. Login to another account" + message += "" + self.cmd("notification", ["done", message, 1000000]) # 1000000 = Show ~forever :) + # Delete from user_manager + user_manager = sys.modules["User.UserManager"].user_manager + if self.user.master_address in user_manager.users: + del user_manager.users[self.user.master_address] + self.response(to, "Successful logout") + else: + self.response(to, "User not found") + + # Show login form + def actionUserLoginForm(self, to): + self.cmd("prompt", ["Login
    Your private key:", "password", "Login"], self.responseUserLogin) + + # Login form submit + def responseUserLogin(self, master_seed): + user_manager = sys.modules["User.UserManager"].user_manager + user = user_manager.create(master_seed=master_seed) + if user.master_address: + message = "Successfull login, reloading page..." + message += "" % user.master_address + message += "" + self.cmd("notification", ["done", message]) + else: + self.cmd("notification", ["error", "Error: Invalid master seed"]) + self.actionUserLoginForm(0) diff --git a/plugins/disabled-Multiuser/__init__.py b/plugins/disabled-Multiuser/__init__.py new file mode 100644 index 00000000..154d6008 --- /dev/null +++ b/plugins/disabled-Multiuser/__init__.py @@ -0,0 +1 @@ +import MultiuserPlugin diff --git a/plugins/disabled-UiPassword/UiPasswordPlugin.py b/plugins/disabled-UiPassword/UiPasswordPlugin.py new file mode 100644 index 00000000..a0e42e81 --- /dev/null +++ b/plugins/disabled-UiPassword/UiPasswordPlugin.py @@ -0,0 +1,118 @@ +import string +import random +import time +import json +import re + +from Config import config +from Plugin import PluginManager + +if "sessions" not in locals().keys(): # To keep sessions between module reloads + sessions = {} + + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + sessions = sessions + last_cleanup = time.time() + + def route(self, path): + if path.endswith("favicon.ico"): + return self.actionFile("src/Ui/media/img/favicon.ico") + else: + if config.ui_password: + if time.time() - self.last_cleanup > 60 * 60: # Cleanup expired sessions every hour + self.cleanup() + # Validate session + session_id = self.getCookies().get("session_id") + if session_id not in self.sessions: # Invalid session id, display login + return self.actionLogin() + return super(UiRequestPlugin, self).route(path) + + # Action: Login + def actionLogin(self): + template = open("plugins/UiPassword/login.html").read() + self.sendHeader() + posted = 
self.getPosted() + if posted: # Validate http posted data + if self.checkPassword(posted.get("password")): + # Valid password, create session + session_id = self.randomString(26) + self.sessions[session_id] = { + "added": time.time(), + "keep": posted.get("keep") + } + + # Redirect to homepage or referer + url = self.env.get("HTTP_REFERER", "") + if not url or re.sub("\?.*", "", url).endswith("/Login"): + url = "/" + config.homepage + cookie_header = ('Set-Cookie', "session_id=%s;path=/;max-age=2592000;" % session_id) # Max age = 30 days + self.start_response('301 Redirect', [('Location', url), cookie_header]) + yield "Redirecting..." + + else: + # Invalid password, show login form again + template = template.replace("{result}", "bad_password") + yield template + + def checkPassword(self, password): + if password == config.ui_password: + return True + else: + return False + + def randomString(self, chars): + return ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(chars)) + + @classmethod + def cleanup(cls): + cls.last_cleanup = time.time() + for session_id, session in cls.sessions.items(): + if session["keep"] and time.time() - session["added"] > 60 * 60 * 24 * 60: # Max 60days for keep sessions + del(cls.sessions[session_id]) + elif not session["keep"] and time.time() - session["added"] > 60 * 60 * 24: # Max 24h for non-keep sessions + del(cls.sessions[session_id]) + + # Action: Display sessions + def actionSessions(self): + self.sendHeader() + yield "
    "
    +        yield json.dumps(self.sessions, indent=4)
    +
    +    # Action: Logout
    +    def actionLogout(self):
    +        # Session id has to passed as get parameter or called without referer to avoid remote logout
    +        session_id = self.getCookies().get("session_id")
    +        if not self.env.get("HTTP_REFERER") or session_id == self.get.get("session_id"):
    +            if session_id in self.sessions:
    +                del self.sessions[session_id]
    +            self.start_response('301 Redirect', [
    +                ('Location', "/"),
    +                ('Set-Cookie', "session_id=deleted; path=/; expires=Thu, 01 Jan 1970 00:00:00 GMT")
    +            ])
    +            yield "Redirecting..."
    +        else:
    +            self.sendHeader()
    +            yield "Error: Invalid session id"
    +
    +
    +@PluginManager.registerTo("ConfigPlugin")
    +class ConfigPlugin(object):
    +    def createArguments(self):
    +        group = self.parser.add_argument_group("UiPassword plugin")
    +        group.add_argument('--ui_password', help='Password to access UiServer', default=None, metavar="password")
    +
    +        return super(ConfigPlugin, self).createArguments()
    +
    +
    +@PluginManager.registerTo("UiWebsocket")
    +class UiWebsocketPlugin(object):
    +    def actionUiLogout(self, to):
    +        permissions = self.getPermissions(to)
    +        if "ADMIN" not in permissions:
    +            return self.response(to, "You don't have permission to run this command")
    +
    +        session_id = self.request.getCookies().get("session_id", "")
    +        message = "" % session_id
    +        self.cmd("notification", ["done", message])
    diff --git a/plugins/disabled-UiPassword/__init__.py b/plugins/disabled-UiPassword/__init__.py
    new file mode 100644
    index 00000000..37350c3d
    --- /dev/null
    +++ b/plugins/disabled-UiPassword/__init__.py
    @@ -0,0 +1 @@
    +import UiPasswordPlugin
    \ No newline at end of file
    diff --git a/plugins/disabled-UiPassword/login.html b/plugins/disabled-UiPassword/login.html
    new file mode 100644
    index 00000000..ed16edbd
    --- /dev/null
    +++ b/plugins/disabled-UiPassword/login.html
    @@ -0,0 +1,116 @@
    +
    +
    + Log In
    + 
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    \ No newline at end of file
    diff --git a/plugins/disabled-Zeroname-local/SiteManagerPlugin.py b/plugins/disabled-Zeroname-local/SiteManagerPlugin.py
    new file mode 100644
    index 00000000..0d001fc3
    --- /dev/null
    +++ b/plugins/disabled-Zeroname-local/SiteManagerPlugin.py
    @@ -0,0 +1,68 @@
    +import logging, json, os, re, sys, time
    +import gevent
    +from Plugin import PluginManager
    +from Config import config
    +from Debug import Debug
    +from domainLookup import lookupDomain
    +
    +allow_reload = False # No reload supported
    +
    +log = logging.getLogger("Zeroname-localPlugin")
    +
    +
    +@PluginManager.registerTo("SiteManager")
    +class SiteManagerPlugin(object):
    +	def load(self):
    +		super(SiteManagerPlugin, self).load()
    +
    +	# Checks if its a valid address
    +	def isAddress(self, address):
    +		if self.isDomain(address): 
    +			return True
    +		else:
    +			return super(SiteManagerPlugin, self).isAddress(address)
    +
    +
    +	# Return: True if the address is domain
    +	def isDomain(self, address):
    +		return re.match("(.*?)([A-Za-z0-9_-]+\.[A-Za-z0-9]+)$", address)
    +
    +
    +	# Resolve domain
    +	# Return: The address or None
    +	def resolveDomain(self, domain):
    +		return lookupDomain(domain)
    +
    +
    +	# Return or create site and start download site files
    +	# Return: Site or None if dns resolve failed
    +	def need(self, address, all_file=True):
    +		if self.isDomain(address): # Its looks like a domain
    +			address_resolved = self.resolveDomain(address)
    +			if address_resolved:
    +				address = address_resolved
    +			else:
    +				return None
    +		
    +		return super(SiteManagerPlugin, self).need(address, all_file)
    +
    +
    +	# Return: Site object or None if not found
    +	def get(self, address):
    +		if self.sites == None: # Not loaded yet
    +			self.load()
    +		if self.isDomain(address): # Its looks like a domain
    +			address_resolved = self.resolveDomain(address)
    +			if address_resolved: # Domain found
    +				site = self.sites.get(address_resolved)
    +				if site:
    +					site_domain = site.settings.get("domain")
    +					if site_domain != address:
    +						site.settings["domain"] = address
    +			else: # Domain not found
    +				site = self.sites.get(address)
    +
    +		else: # Access by site address
    +			site = self.sites.get(address)
    +		return site
    +
    diff --git a/plugins/disabled-Zeroname-local/UiRequestPlugin.py b/plugins/disabled-Zeroname-local/UiRequestPlugin.py
    new file mode 100644
    index 00000000..3e54c765
    --- /dev/null
    +++ b/plugins/disabled-Zeroname-local/UiRequestPlugin.py
    @@ -0,0 +1,40 @@
    +import re
    +from Plugin import PluginManager
    +
    +@PluginManager.registerTo("UiRequest")
    +class UiRequestPlugin(object):
    +	def __init__(self, *args, **kwargs):
    +		from Site import SiteManager
    +		self.site_manager = SiteManager.site_manager
    +		super(UiRequestPlugin, self).__init__(*args, **kwargs)
    +
    +
    +	# Media request
    +	def actionSiteMedia(self, path):
    +		match = re.match("/media/(?P
    [A-Za-z0-9]+\.[A-Za-z0-9\.]+)(?P/.*|$)", path) + if match: # Its a valid domain, resolve first + domain = match.group("address") + address = self.site_manager.resolveDomain(domain) + if address: + path = "/media/"+address+match.group("inner_path") + return super(UiRequestPlugin, self).actionSiteMedia(path) # Get the wrapper frame output + + + # Is mediarequest allowed from that referer + def isMediaRequestAllowed(self, site_address, referer): + referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address + referer_path = re.sub("\?.*", "", referer_path) # Remove http params + + if self.isProxyRequest(): # Match to site domain + referer = re.sub("^http://zero[/]+", "http://", referer) # Allow /zero access + referer_site_address = re.match("http[s]{0,1}://(.*?)(/|$)", referer).group(1) + else: # Match to request path + referer_site_address = re.match("/(?P
    [A-Za-z0-9\.]+)(?P/.*|$)", referer_path).group("address") + + if referer_site_address == site_address: # Referer site address as simple address + return True + elif self.site_manager.resolveDomain(referer_site_address) == site_address: # Referer site address as dns + return True + else: # Invalid referer + return False + diff --git a/plugins/disabled-Zeroname-local/__init__.py b/plugins/disabled-Zeroname-local/__init__.py new file mode 100644 index 00000000..889802db --- /dev/null +++ b/plugins/disabled-Zeroname-local/__init__.py @@ -0,0 +1,2 @@ +import UiRequestPlugin +import SiteManagerPlugin \ No newline at end of file diff --git a/plugins/disabled-Zeroname-local/domainLookup.py b/plugins/disabled-Zeroname-local/domainLookup.py new file mode 100644 index 00000000..5bed7438 --- /dev/null +++ b/plugins/disabled-Zeroname-local/domainLookup.py @@ -0,0 +1,54 @@ +from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException +import time, json, os, sys, re, socket, json + +# Either returns domain's address or none if it doesn't exist +# Supports subdomains and .bit on the end +def lookupDomain(domain): + domain = domain.lower() + + #remove .bit on end + if domain[-4:] == ".bit": + domain = domain[0:-4] + + #check for subdomain + if domain.find(".") != -1: + subdomain = domain[0:domain.find(".")] + domain = domain[domain.find(".")+1:] + else: + subdomain = "" + + try: + domain_object = rpc.name_show("d/"+domain) + except: + #domain doesn't exist + return None + + domain_json = json.loads(domain_object['value']) + + try: + domain_address = domain_json["zeronet"][subdomain] + except: + #domain exists but doesn't have any zeronet value + return None + + return domain_address + +# Loading config... 
+ +# Check whether platform is on windows or linux +# On linux namecoin is installed under ~/.namecoin, while on on windows it is in %appdata%/Namecoin + +if sys.platform == "win32": + namecoin_location = os.getenv('APPDATA') + "/Namecoin/" +else: + namecoin_location = os.path.expanduser("~/.namecoin/") + +# Getting rpc connect details +namecoin_conf = open(namecoin_location + "namecoin.conf").read() + +# Connecting to RPC +rpc_user = re.search("rpcuser=(.*)$", namecoin_conf, re.M).group(1) +rpc_pass = re.search("rpcpassword=(.*)$", namecoin_conf, re.M).group(1) +rpc_url = "http://%s:%s@127.0.0.1:8336" % (rpc_user, rpc_pass) + +rpc = AuthServiceProxy(rpc_url, timeout=60*5) diff --git a/requirements.txt b/requirements.txt index 538a6dfc..7d3c5720 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,13 +1,2 @@ -gevent==1.4.0; python_version <= "3.6" -greenlet==0.4.16; python_version <= "3.6" -gevent>=20.9.0; python_version >= "3.7" -msgpack>=0.4.4 -base58 -merkletools @ git+https://github.com/ZeroNetX/pymerkletools.git@dev -rsa -PySocks>=1.6.8 -pyasn1 -websocket_client -gevent-ws -coincurve -maxminddb +gevent==1.0.1 +msgpack-python>=0.4.4 diff --git a/src/Config.py b/src/Config.py index a9208d55..f82873b5 100644 --- a/src/Config.py +++ b/src/Config.py @@ -1,42 +1,16 @@ import argparse import sys import os -import locale -import re -import configparser -import logging -import logging.handlers -import stat -import time +import ConfigParser class Config(object): def __init__(self, argv): - self.version = "0.9.0" - self.rev = 4630 + self.version = "0.3.4" + self.rev = 668 self.argv = argv self.action = None - self.test_parser = None - self.pending_changes = {} - self.need_restart = False - self.keys_api_change_allowed = set([ - "tor", "fileserver_port", "language", "tor_use_bridges", "trackers_proxy", "trackers", - "trackers_file", "open_browser", "log_level", "fileserver_ip_type", "ip_external", "offline", - "threads_fs_read", "threads_fs_write", "threads_crypt", 
"threads_db" - ]) - self.keys_restart_need = set([ - "tor", "fileserver_port", "fileserver_ip_type", "threads_fs_read", "threads_fs_write", "threads_crypt", "threads_db" - ]) - self.start_dir = self.getStartDir() - - self.config_file = self.start_dir + "/zeronet.conf" - self.data_dir = self.start_dir + "/data" - self.log_dir = self.start_dir + "/log" - self.openssl_lib_file = None - self.openssl_bin_file = None - - self.trackers_file = False self.createParser() self.createArguments() @@ -53,41 +27,15 @@ class Config(object): def strToBool(self, v): return v.lower() in ("yes", "true", "t", "1") - def getStartDir(self): - this_file = os.path.abspath(__file__).replace("\\", "/").rstrip("cd") - - if "--start_dir" in self.argv: - start_dir = self.argv[self.argv.index("--start_dir") + 1] - elif this_file.endswith("/Contents/Resources/core/src/Config.py"): - # Running as ZeroNet.app - if this_file.startswith("/Application") or this_file.startswith("/private") or this_file.startswith(os.path.expanduser("~/Library")): - # Runnig from non-writeable directory, put data to Application Support - start_dir = os.path.expanduser("~/Library/Application Support/ZeroNet") - else: - # Running from writeable directory put data next to .app - start_dir = re.sub("/[^/]+/Contents/Resources/core/src/Config.py", "", this_file) - elif this_file.endswith("/core/src/Config.py"): - # Running as exe or source is at Application Support directory, put var files to outside of core dir - start_dir = this_file.replace("/core/src/Config.py", "") - elif this_file.endswith("usr/share/zeronet/src/Config.py"): - # Running from non-writeable location, e.g., AppImage - start_dir = os.path.expanduser("~/ZeroNet") - else: - start_dir = "." 
- - return start_dir - # Create command line arguments def createArguments(self): - from Crypt import CryptHash - access_key_default = CryptHash.random(24, "base64") # Used to allow restrited plugins when multiuser plugin is enabled trackers = [ - "http://open.acgnxtracker.com:80/announce", # DE - "http://tracker.bt4g.com:2095/announce", # Cloudflare - "http://tracker.files.fm:6969/announce", - "http://t.publictracker.xyz:6969/announce", - "https://tracker.lilithraws.cf:443/announce", - "https://tracker.babico.name.tr:443/announce", + "udp://tracker.coppersurfer.tk:6969", + "udp://tracker.leechers-paradise.org:6969", + "udp://9.rarbg.com:2710", + "http://tracker.aletorrenty.pl:2710/announce", + "http://tracker.skyts.net:6969/announce", + "http://torrent.gresille.org/announce" ] # Platform specific if sys.platform.startswith("win"): @@ -95,43 +43,13 @@ class Config(object): else: coffeescript = None - try: - language, enc = locale.getdefaultlocale() - language = language.lower().replace("_", "-") - if language not in ["pt-br", "zh-tw"]: - language = language.split("-")[0] - except Exception: - language = "en" - use_openssl = True - if repr(1483108852.565) != "1483108852.565": # Fix for weird Android issue - fix_float_decimals = True - else: - fix_float_decimals = False - - config_file = self.start_dir + "/zeronet.conf" - data_dir = self.start_dir + "/data" - log_dir = self.start_dir + "/log" - - ip_local = ["127.0.0.1", "::1"] - # Main action = self.subparsers.add_parser("main", help='Start UiServer and FileServer (default)') # SiteCreate action = self.subparsers.add_parser("siteCreate", help='Create a new site') - action.register('type', 'bool', self.strToBool) - action.add_argument('--use_master_seed', help="Allow created site's private key to be recovered using the master seed in users.json (default: True)", type="bool", choices=[True, False], default=True) - - # SiteNeedFile - action = self.subparsers.add_parser("siteNeedFile", help='Get a file from site') - 
action.add_argument('address', help='Site address') - action.add_argument('inner_path', help='File inner path') - - # SiteDownload - action = self.subparsers.add_parser("siteDownload", help='Download a new site') - action.add_argument('address', help='Site address') # SiteSign action = self.subparsers.add_parser("siteSign", help='Update and sign content.json: address [privatekey]') @@ -139,7 +57,6 @@ class Config(object): action.add_argument('privatekey', help='Private key (default: ask on execute)', nargs='?') action.add_argument('--inner_path', help='File you want to sign (default: content.json)', default="content.json", metavar="inner_path") - action.add_argument('--remove_missing_optional', help='Remove optional files that is not present in the directory', action='store_true') action.add_argument('--publish', help='Publish site after the signing', action='store_true') # SitePublish @@ -156,12 +73,6 @@ class Config(object): action = self.subparsers.add_parser("siteVerify", help='Verify site files using sha512: address') action.add_argument('address', help='Site to verify') - # SiteCmd - action = self.subparsers.add_parser("siteCmd", help='Execute a ZeroFrame API command on a site') - action.add_argument('address', help='Site address') - action.add_argument('cmd', help='API command name') - action.add_argument('parameters', help='Parameters of the command', nargs='?') - # dbRebuild action = self.subparsers.add_parser("dbRebuild", help='Rebuild site database cache') action.add_argument('address', help='Site to rebuild') @@ -196,150 +107,56 @@ class Config(object): action.add_argument('message', help='Message to sign') action.add_argument('privatekey', help='Private key') - # Crypt Verify - action = self.subparsers.add_parser("cryptVerify", help='Verify message using Bitcoin public address') - action.add_argument('message', help='Message to verify') - action.add_argument('sign', help='Signiture for message') - action.add_argument('address', help='Signer\'s 
address') - - # Crypt GetPrivatekey - action = self.subparsers.add_parser("cryptGetPrivatekey", help='Generate a privatekey from master seed') - action.add_argument('master_seed', help='Source master seed') - action.add_argument('site_address_index', help='Site address index', type=int) - - action = self.subparsers.add_parser("getConfig", help='Return json-encoded info') - action = self.subparsers.add_parser("testConnection", help='Testing') - action = self.subparsers.add_parser("testAnnounce", help='Testing') - - self.test_parser = self.subparsers.add_parser("test", help='Run a test') - self.test_parser.add_argument('test_name', help='Test name', nargs="?") - # self.test_parser.add_argument('--benchmark', help='Run the tests multiple times to measure the performance', action='store_true') - # Config parameters - self.parser.add_argument('--verbose', help='More detailed logging', action='store_true') self.parser.add_argument('--debug', help='Debug mode', action='store_true') - self.parser.add_argument('--silent', help='Only log errors to terminal output', action='store_true') self.parser.add_argument('--debug_socket', help='Debug socket connections', action='store_true') - self.parser.add_argument('--merge_media', help='Merge all.js and all.css', action='store_true') self.parser.add_argument('--batch', help="Batch mode (No interactive input for commands)", action='store_true') - self.parser.add_argument('--start_dir', help='Path of working dir for variable content (data, log, .conf)', default=self.start_dir, metavar="path") - self.parser.add_argument('--config_file', help='Path of config file', default=config_file, metavar="path") - self.parser.add_argument('--data_dir', help='Path of data directory', default=data_dir, metavar="path") + self.parser.add_argument('--config_file', help='Path of config file', default="zeronet.conf", metavar="path") + self.parser.add_argument('--data_dir', help='Path of data directory', default="data", metavar="path") + 
self.parser.add_argument('--log_dir', help='Path of logging directory', default="log", metavar="path") - self.parser.add_argument('--console_log_level', help='Level of logging to console', default="default", choices=["default", "DEBUG", "INFO", "ERROR", "off"]) - - self.parser.add_argument('--log_dir', help='Path of logging directory', default=log_dir, metavar="path") - self.parser.add_argument('--log_level', help='Level of logging to file', default="DEBUG", choices=["DEBUG", "INFO", "ERROR", "off"]) - self.parser.add_argument('--log_rotate', help='Log rotate interval', default="daily", choices=["hourly", "daily", "weekly", "off"]) - self.parser.add_argument('--log_rotate_backup_count', help='Log rotate backup count', default=5, type=int) - - self.parser.add_argument('--language', help='Web interface language', default=language, metavar='language') self.parser.add_argument('--ui_ip', help='Web interface bind address', default="127.0.0.1", metavar='ip') self.parser.add_argument('--ui_port', help='Web interface bind port', default=43110, type=int, metavar='port') self.parser.add_argument('--ui_restrict', help='Restrict web access', default=False, metavar='ip', nargs='*') - self.parser.add_argument('--ui_host', help='Allow access using this hosts', metavar='host', nargs='*') - self.parser.add_argument('--ui_trans_proxy', help='Allow access using a transparent proxy', action='store_true') - self.parser.add_argument('--open_browser', help='Open homepage in web browser automatically', nargs='?', const="default_browser", metavar='browser_name') - self.parser.add_argument('--homepage', help='Web interface Homepage', default='1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d', + self.parser.add_argument('--homepage', help='Web interface Homepage', default='1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr', metavar='address') - self.parser.add_argument('--updatesite', help='Source code update site', default='1Update8crprmciJHwp2WXqkx2c4iYp18', - metavar='address') - 
self.parser.add_argument('--access_key', help='Plugin access key default: Random key generated at startup', default=access_key_default, metavar='key') - self.parser.add_argument('--dist_type', help='Type of installed distribution', default='source') - - self.parser.add_argument('--size_limit', help='Default site size limit in MB', default=25, type=int, metavar='limit') - self.parser.add_argument('--file_size_limit', help='Maximum per file size limit in MB', default=10, type=int, metavar='limit') - self.parser.add_argument('--connected_limit', help='Max connected peer per site', default=8, type=int, metavar='connected_limit') - self.parser.add_argument('--global_connected_limit', help='Max connections', default=512, type=int, metavar='global_connected_limit') - self.parser.add_argument('--workers', help='Download workers per site', default=5, type=int, metavar='workers') + self.parser.add_argument('--size_limit', help='Default site size limit in MB', default=10, metavar='size') self.parser.add_argument('--fileserver_ip', help='FileServer bind address', default="*", metavar='ip') - self.parser.add_argument('--fileserver_port', help='FileServer bind port (0: randomize)', default=0, type=int, metavar='port') - self.parser.add_argument('--fileserver_port_range', help='FileServer randomization range', default="10000-40000", metavar='port') - self.parser.add_argument('--fileserver_ip_type', help='FileServer ip type', default="dual", choices=["ipv4", "ipv6", "dual"]) - self.parser.add_argument('--ip_local', help='My local ips', default=ip_local, type=int, metavar='ip', nargs='*') - self.parser.add_argument('--ip_external', help='Set reported external ip (tested on start if None)', metavar='ip', nargs='*') - self.parser.add_argument('--offline', help='Disable network communication', action='store_true') - + self.parser.add_argument('--fileserver_port', help='FileServer bind port', default=15441, type=int, metavar='port') self.parser.add_argument('--disable_udp', 
help='Disable UDP connections', action='store_true') self.parser.add_argument('--proxy', help='Socks proxy address', metavar='ip:port') - self.parser.add_argument('--bind', help='Bind outgoing sockets to this address', metavar='ip') + self.parser.add_argument('--ip_external', help='Set reported external ip (tested on start if None)', metavar='ip') self.parser.add_argument('--trackers', help='Bootstraping torrent trackers', default=trackers, metavar='protocol://address', nargs='*') - self.parser.add_argument('--trackers_file', help='Load torrent trackers dynamically from a file', metavar='path', nargs='*') - self.parser.add_argument('--trackers_proxy', help='Force use proxy to connect to trackers (disable, tor, ip:port)', default="disable") - self.parser.add_argument('--use_libsecp256k1', help='Use Libsecp256k1 liblary for speedup', type='bool', choices=[True, False], default=True) - self.parser.add_argument('--use_openssl', help='Use OpenSSL liblary for speedup', type='bool', choices=[True, False], default=True) - self.parser.add_argument('--openssl_lib_file', help='Path for OpenSSL library file (default: detect)', default=argparse.SUPPRESS, metavar="path") - self.parser.add_argument('--openssl_bin_file', help='Path for OpenSSL binary file (default: detect)', default=argparse.SUPPRESS, metavar="path") - self.parser.add_argument('--disable_db', help='Disable database updating', action='store_true') + self.parser.add_argument('--trackers_file', help='Load torrent trackers dynamically from a file', default=False, metavar='path') + self.parser.add_argument('--use_openssl', help='Use OpenSSL liblary for speedup', + type='bool', choices=[True, False], default=use_openssl) self.parser.add_argument('--disable_encryption', help='Disable connection encryption', action='store_true') - self.parser.add_argument('--force_encryption', help="Enforce encryption to all peer connections", action='store_true') self.parser.add_argument('--disable_sslcompression', help='Disable SSL 
compression to save memory', type='bool', choices=[True, False], default=True) - self.parser.add_argument('--keep_ssl_cert', help='Disable new SSL cert generation on startup', action='store_true') - self.parser.add_argument('--max_files_opened', help='Change maximum opened files allowed by OS to this value on startup', - default=2048, type=int, metavar='limit') - self.parser.add_argument('--stack_size', help='Change thread stack size', default=None, type=int, metavar='thread_stack_size') self.parser.add_argument('--use_tempfiles', help='Use temporary files when downloading (experimental)', type='bool', choices=[True, False], default=False) self.parser.add_argument('--stream_downloads', help='Stream download directly to files (experimental)', type='bool', choices=[True, False], default=False) self.parser.add_argument("--msgpack_purepython", help='Use less memory, but a bit more CPU power', - type='bool', choices=[True, False], default=False) - self.parser.add_argument("--fix_float_decimals", help='Fix content.json modification date float precision on verification', - type='bool', choices=[True, False], default=fix_float_decimals) - self.parser.add_argument("--db_mode", choices=["speed", "security"], default="speed") - - self.parser.add_argument('--threads_fs_read', help='Number of threads for file read operations', default=1, type=int) - self.parser.add_argument('--threads_fs_write', help='Number of threads for file write operations', default=1, type=int) - self.parser.add_argument('--threads_crypt', help='Number of threads for cryptographic operations', default=2, type=int) - self.parser.add_argument('--threads_db', help='Number of threads for database operations', default=1, type=int) - - self.parser.add_argument("--download_optional", choices=["manual", "auto"], default="manual") + type='bool', choices=[True, False], default=True) self.parser.add_argument('--coffeescript_compiler', help='Coffeescript compiler for developing', default=coffeescript, 
metavar='executable_path') - self.parser.add_argument('--tor', help='enable: Use only for Tor peers, always: Use Tor for every connection', choices=["disable", "enable", "always"], default='enable') - self.parser.add_argument('--tor_controller', help='Tor controller address', metavar='ip:port', default='127.0.0.1:9051') - self.parser.add_argument('--tor_proxy', help='Tor proxy address', metavar='ip:port', default='127.0.0.1:9050') - self.parser.add_argument('--tor_password', help='Tor controller password', metavar='password') - self.parser.add_argument('--tor_use_bridges', help='Use obfuscated bridge relays to avoid Tor block', action='store_true') - self.parser.add_argument('--tor_hs_limit', help='Maximum number of hidden services in Tor always mode', metavar='limit', type=int, default=10) - self.parser.add_argument('--tor_hs_port', help='Hidden service port in Tor always mode', metavar='limit', type=int, default=15441) - self.parser.add_argument('--version', action='version', version='ZeroNet %s r%s' % (self.version, self.rev)) - self.parser.add_argument('--end', help='Stop multi value argument parsing', action='store_true') return self.parser def loadTrackersFile(self): - if not self.trackers_file: - self.trackers_file = ["trackers.txt", "{data_dir}/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d/trackers.txt"] - self.trackers = self.arguments.trackers[:] - - for trackers_file in self.trackers_file: - try: - if trackers_file.startswith("/"): # Absolute - trackers_file_path = trackers_file - elif trackers_file.startswith("{data_dir}"): # Relative to data_dir - trackers_file_path = trackers_file.replace("{data_dir}", self.data_dir) - else: # Relative to zeronet.py - trackers_file_path = self.start_dir + "/" + trackers_file - - if not os.path.exists(trackers_file_path): - continue - - for line in open(trackers_file_path): - tracker = line.strip() - if "://" in tracker and tracker not in self.trackers: - self.trackers.append(tracker) - except Exception as err: - print("Error 
loading trackers file: %s" % err) + self.trackers = [] + for tracker in open(self.trackers_file): + if "://" in tracker: + self.trackers.append(tracker.strip()) # Find arguments specified for current action def getActionArguments(self): @@ -351,7 +168,7 @@ class Config(object): # Try to find action from argv def getAction(self, argv): - actions = [list(action.choices.keys()) for action in self.parser._actions if action.dest == "action"][0] # Valid actions + actions = [action.choices.keys() for action in self.parser._actions if action.dest == "action"][0] # Valid actions found_action = False for action in actions: # See if any in argv if action in argv: @@ -380,17 +197,8 @@ class Config(object): valid_parameters.append(arg) return valid_parameters + plugin_parameters - def getParser(self, argv): - action = self.getAction(argv) - if not action: - return self.parser - else: - return self.subparsers.choices[action] - # Parse arguments from config file and command line def parse(self, silent=False, parse_config=True): - argv = self.argv[:] # Copy command line arguments - current_parser = self.getParser(argv) if silent: # Don't display messages or quit on unknown parameter original_print_message = self.parser._print_message original_exit = self.parser.exit @@ -398,36 +206,27 @@ class Config(object): def silencer(parser, function_name): parser.exited = True return None - current_parser.exited = False - current_parser._print_message = lambda *args, **kwargs: silencer(current_parser, "_print_message") - current_parser.exit = lambda *args, **kwargs: silencer(current_parser, "exit") + self.parser.exited = False + self.parser._print_message = lambda *args, **kwargs: silencer(self.parser, "_print_message") + self.parser.exit = lambda *args, **kwargs: silencer(self.parser, "exit") - self.parseCommandline(argv, silent) # Parse argv - self.setAttributes() + argv = self.argv[:] # Copy command line arguments if parse_config: argv = self.parseConfig(argv) # Add arguments from config 
file - self.parseCommandline(argv, silent) # Parse argv self.setAttributes() - if not silent: - if self.fileserver_ip != "*" and self.fileserver_ip not in self.ip_local: - self.ip_local.append(self.fileserver_ip) - if silent: # Restore original functions - if current_parser.exited and self.action == "main": # Argument parsing halted, don't start ZeroNet with main action + if self.parser.exited and self.action == "main": # Argument parsing halted, don't start ZeroNet with main action self.action = None - current_parser._print_message = original_print_message - current_parser.exit = original_exit - - self.loadTrackersFile() + self.parser._print_message = original_print_message + self.parser.exit = original_exit # Parse command line arguments def parseCommandline(self, argv, silent=False): # Find out if action is specificed on start action = self.getAction(argv) if not action: - argv.append("--end") argv.append("main") action = "main" argv = self.moveUnknownToEnd(argv, action) @@ -443,55 +242,29 @@ class Config(object): # Parse config file def parseConfig(self, argv): # Find config file path from parameters + config_file = "zeronet.conf" if "--config_file" in argv: - self.config_file = argv[argv.index("--config_file") + 1] + config_file = argv[argv.index("--config_file") + 1] # Load config file - if os.path.isfile(self.config_file): - config = configparser.RawConfigParser(allow_no_value=True, strict=False) - config.read(self.config_file) + if os.path.isfile(config_file): + config = ConfigParser.ConfigParser(allow_no_value=True) + config.read(config_file) for section in config.sections(): for key, val in config.items(section): - if val == "True": - val = None if section != "global": # If not global prefix key with section key = section + "_" + key - - if key == "open_browser": # Prefer config file value over cli argument - while "--%s" % key in argv: - pos = argv.index("--open_browser") - del argv[pos:pos + 2] - - argv_extend = ["--%s" % key] if val: for line in 
val.strip().split("\n"): # Allow multi-line values - argv_extend.append(line) - if "\n" in val: - argv_extend.append("--end") - - argv = argv[:1] + argv_extend + argv[1:] + argv.insert(1, line) + argv.insert(1, "--%s" % key) return argv - # Return command line value of given argument - def getCmdlineValue(self, key): - if key not in self.argv: - return None - argv_index = self.argv.index(key) - if argv_index == len(self.argv) - 1: # last arg, test not specified - return None - - return self.argv[argv_index + 1] - # Expose arguments as class attributes def setAttributes(self): # Set attributes from arguments if self.arguments: args = vars(self.arguments) for key, val in args.items(): - if type(val) is list: - val = val[:] - if key in ("data_dir", "log_dir", "start_dir", "openssl_bin_file", "openssl_lib_file"): - if val: - val = val.replace("\\", "/") setattr(self, key, val) def loadPlugins(self): @@ -500,11 +273,7 @@ class Config(object): @PluginManager.acceptPlugins class ConfigPlugin(object): def __init__(self, config): - self.argv = config.argv self.parser = config.parser - self.subparsers = config.subparsers - self.test_parser = config.test_parser - self.getCmdlineValue = config.getCmdlineValue self.createArguments() def createArguments(self): @@ -512,164 +281,5 @@ class Config(object): ConfigPlugin(self) - def saveValue(self, key, value): - if not os.path.isfile(self.config_file): - content = "" - else: - content = open(self.config_file).read() - lines = content.splitlines() - - global_line_i = None - key_line_i = None - i = 0 - for line in lines: - if line.strip() == "[global]": - global_line_i = i - if line.startswith(key + " =") or line == key: - key_line_i = i - i += 1 - - if key_line_i and len(lines) > key_line_i + 1: - while True: # Delete previous multiline values - is_value_line = lines[key_line_i + 1].startswith(" ") or lines[key_line_i + 1].startswith("\t") - if not is_value_line: - break - del lines[key_line_i + 1] - - if value is None: # Delete line 
- if key_line_i: - del lines[key_line_i] - - else: # Add / update - if type(value) is list: - value_lines = [""] + [str(line).replace("\n", "").replace("\r", "") for line in value] - else: - value_lines = [str(value).replace("\n", "").replace("\r", "")] - new_line = "%s = %s" % (key, "\n ".join(value_lines)) - if key_line_i: # Already in the config, change the line - lines[key_line_i] = new_line - elif global_line_i is None: # No global section yet, append to end of file - lines.append("[global]") - lines.append(new_line) - else: # Has global section, append the line after it - lines.insert(global_line_i + 1, new_line) - - open(self.config_file, "w").write("\n".join(lines)) - - def getServerInfo(self): - from Plugin import PluginManager - import main - - info = { - "platform": sys.platform, - "fileserver_ip": self.fileserver_ip, - "fileserver_port": self.fileserver_port, - "ui_ip": self.ui_ip, - "ui_port": self.ui_port, - "version": self.version, - "rev": self.rev, - "language": self.language, - "debug": self.debug, - "plugins": PluginManager.plugin_manager.plugin_names, - - "log_dir": os.path.abspath(self.log_dir), - "data_dir": os.path.abspath(self.data_dir), - "src_dir": os.path.dirname(os.path.abspath(__file__)) - } - - try: - info["ip_external"] = main.file_server.port_opened - info["tor_enabled"] = main.file_server.tor_manager.enabled - info["tor_status"] = main.file_server.tor_manager.status - except Exception: - pass - - return info - - def initConsoleLogger(self): - if self.action == "main": - format = '[%(asctime)s] %(name)s %(message)s' - else: - format = '%(name)s %(message)s' - - if self.console_log_level == "default": - if self.silent: - level = logging.ERROR - elif self.debug: - level = logging.DEBUG - else: - level = logging.INFO - else: - level = logging.getLevelName(self.console_log_level) - - console_logger = logging.StreamHandler() - console_logger.setFormatter(logging.Formatter(format, "%H:%M:%S")) - console_logger.setLevel(level) - 
logging.getLogger('').addHandler(console_logger) - - def initFileLogger(self): - if self.action == "main": - log_file_path = "%s/debug.log" % self.log_dir - else: - log_file_path = "%s/cmd.log" % self.log_dir - - if self.log_rotate == "off": - file_logger = logging.FileHandler(log_file_path, "w", "utf-8") - else: - when_names = {"weekly": "w", "daily": "d", "hourly": "h"} - file_logger = logging.handlers.TimedRotatingFileHandler( - log_file_path, when=when_names[self.log_rotate], interval=1, backupCount=self.log_rotate_backup_count, - encoding="utf8" - ) - - if os.path.isfile(log_file_path): - file_logger.doRollover() # Always start with empty log file - file_logger.setFormatter(logging.Formatter('[%(asctime)s] %(levelname)-8s %(name)s %(message)s')) - file_logger.setLevel(logging.getLevelName(self.log_level)) - logging.getLogger('').setLevel(logging.getLevelName(self.log_level)) - logging.getLogger('').addHandler(file_logger) - - def initLogging(self, console_logging=None, file_logging=None): - if console_logging == None: - console_logging = self.console_log_level != "off" - - if file_logging == None: - file_logging = self.log_level != "off" - - # Create necessary files and dirs - if not os.path.isdir(self.log_dir): - os.mkdir(self.log_dir) - try: - os.chmod(self.log_dir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) - except Exception as err: - print("Can't change permission of %s: %s" % (self.log_dir, err)) - - # Make warning hidden from console - logging.WARNING = 15 # Don't display warnings if not in debug mode - logging.addLevelName(15, "WARNING") - - logging.getLogger('').name = "-" # Remove root prefix - - self.error_logger = ErrorLogHandler() - self.error_logger.setLevel(logging.getLevelName("ERROR")) - logging.getLogger('').addHandler(self.error_logger) - - if console_logging: - self.initConsoleLogger() - if file_logging: - self.initFileLogger() - - -class ErrorLogHandler(logging.StreamHandler): - def __init__(self): - self.lines = [] - return 
super().__init__() - - def emit(self, record): - self.lines.append([time.time(), record.levelname, self.format(record)]) - - def onNewRecord(self, record): - pass - config = Config(sys.argv) diff --git a/src/Connection/Connection.py b/src/Connection/Connection.py index 22bcf29c..8c7063be 100644 --- a/src/Connection/Connection.py +++ b/src/Connection/Connection.py @@ -2,49 +2,33 @@ import socket import time import gevent -try: - from gevent.coros import RLock -except: - from gevent.lock import RLock +import msgpack from Config import config from Debug import Debug -from util import Msgpack +from util import StreamingMsgpack from Crypt import CryptConnection -from util import helper class Connection(object): __slots__ = ( - "sock", "sock_wrapped", "ip", "port", "cert_pin", "target_onion", "id", "protocol", "type", "server", "unpacker", "unpacker_bytes", "req_id", "ip_type", - "handshake", "crypt", "connected", "event_connected", "closed", "start_time", "handshake_time", "last_recv_time", "is_private_ip", "is_tracker_connection", - "last_message_time", "last_send_time", "last_sent_time", "incomplete_buff_recv", "bytes_recv", "bytes_sent", "cpu_time", "send_lock", - "last_ping_delay", "last_req_time", "last_cmd_sent", "last_cmd_recv", "bad_actions", "sites", "name", "waiting_requests", "waiting_streams" + "sock", "sock_wrapped", "ip", "port", "id", "protocol", "type", "server", "unpacker", "req_id", + "handshake", "crypt", "connected", "event_connected", "closed", "start_time", "last_recv_time", + "last_message_time", "last_send_time", "last_sent_time", "incomplete_buff_recv", "bytes_recv", "bytes_sent", + "last_ping_delay", "last_req_time", "last_cmd", "name", "updateName", "waiting_requests", "waiting_streams" ) - def __init__(self, server, ip, port, sock=None, target_onion=None, is_tracker_connection=False): + def __init__(self, server, ip, port, sock=None): self.sock = sock - self.cert_pin = None - if "#" in ip: - ip, self.cert_pin = ip.split("#") - 
self.target_onion = target_onion # Requested onion adress + self.ip = ip + self.port = port self.id = server.last_connection_id server.last_connection_id += 1 self.protocol = "?" self.type = "?" - self.ip_type = "?" - self.port = int(port) - self.setIp(ip) - - if helper.isPrivateIp(self.ip) and self.ip not in config.ip_local: - self.is_private_ip = True - else: - self.is_private_ip = False - self.is_tracker_connection = is_tracker_connection self.server = server self.unpacker = None # Stream incoming socket messages here - self.unpacker_bytes = 0 # How many bytes the unpacker received self.req_id = 0 # Last request id self.handshake = {} # Handshake info got from peer self.crypt = None # Connection encryption method @@ -56,7 +40,6 @@ class Connection(object): # Stats self.start_time = time.time() - self.handshake_time = 0 self.last_recv_time = 0 self.last_message_time = 0 self.last_send_time = 0 @@ -66,12 +49,7 @@ class Connection(object): self.bytes_sent = 0 self.last_ping_delay = None self.last_req_time = 0 - self.last_cmd_sent = None - self.last_cmd_recv = None - self.bad_actions = 0 - self.sites = 0 - self.cpu_time = 0.0 - self.send_lock = RLock() + self.last_cmd = None self.name = None self.updateName() @@ -79,18 +57,6 @@ class Connection(object): self.waiting_requests = {} # Waiting sent requests self.waiting_streams = {} # Waiting response file streams - def setIp(self, ip): - self.ip = ip - self.ip_type = helper.getIpType(ip) - self.updateName() - - def createSocket(self): - if helper.getIpType(self.ip) == "ipv6" and not hasattr(socket, "socket_noproxy"): - # Create IPv6 connection as IPv4 when using proxy - return socket.socket(socket.AF_INET6, socket.SOCK_STREAM) - else: - return socket.socket(socket.AF_INET, socket.SOCK_STREAM) - def updateName(self): self.name = "Conn#%2s %-12s [%s]" % (self.id, self.ip, self.protocol) @@ -103,108 +69,39 @@ class Connection(object): def log(self, text): self.server.log.debug("%s > %s" % (self.name, text)) - def 
getValidSites(self): - return [key for key, val in self.server.tor_manager.site_onions.items() if val == self.target_onion] - - def badAction(self, weight=1): - self.bad_actions += weight - if self.bad_actions > 40: - self.close("Too many bad actions") - elif self.bad_actions > 20: - time.sleep(5) - - def goodAction(self): - self.bad_actions = 0 - # Open connection to peer and wait for handshake def connect(self): + self.log("Connecting...") self.type = "out" - if self.ip_type == "onion": - if not self.server.tor_manager or not self.server.tor_manager.enabled: - raise Exception("Can't connect to onion addresses, no Tor controller present") - self.sock = self.server.tor_manager.createSocket(self.ip, self.port) - elif config.tor == "always" and helper.isPrivateIp(self.ip) and self.ip not in config.ip_local: - raise Exception("Can't connect to local IPs in Tor: always mode") - elif config.trackers_proxy != "disable" and config.tor != "always" and self.is_tracker_connection: - if config.trackers_proxy == "tor": - self.sock = self.server.tor_manager.createSocket(self.ip, self.port) - else: - import socks - self.sock = socks.socksocket() - proxy_ip, proxy_port = config.trackers_proxy.split(":") - self.sock.set_proxy(socks.PROXY_TYPE_SOCKS5, proxy_ip, int(proxy_port)) - else: - self.sock = self.createSocket() + self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + self.sock.connect((self.ip, int(self.port))) - if "TCP_NODELAY" in dir(socket): - self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - - timeout_before = self.sock.gettimeout() - self.sock.settimeout(30) - if self.ip_type == "ipv6" and not hasattr(self.sock, "proxy"): - sock_address = (self.ip, self.port, 1, 1) - else: - sock_address = (self.ip, self.port) - - self.sock.connect(sock_address) - - # Implicit SSL - should_encrypt = not self.ip_type == "onion" and self.ip not in self.server.broken_ssl_ips and self.ip not in config.ip_local - if self.cert_pin: - self.sock = 
CryptConnection.manager.wrapSocket(self.sock, "tls-rsa", cert_pin=self.cert_pin) - self.sock.do_handshake() - self.crypt = "tls-rsa" - self.sock_wrapped = True - elif should_encrypt and "tls-rsa" in CryptConnection.manager.crypt_supported: - try: - self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa") - self.sock.do_handshake() - self.crypt = "tls-rsa" - self.sock_wrapped = True - except Exception as err: - if not config.force_encryption: - self.log("Crypt connection error, adding %s:%s as broken ssl. %s" % (self.ip, self.port, Debug.formatException(err))) - self.server.broken_ssl_ips[self.ip] = True - self.sock.close() - self.crypt = None - self.sock = self.createSocket() - self.sock.settimeout(30) - self.sock.connect(sock_address) + # Implicit SSL in the future + # self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa") + # self.sock.do_handshake() + # self.crypt = "tls-rsa" + # self.sock_wrapped = True # Detect protocol - self.send({"cmd": "handshake", "req_id": 0, "params": self.getHandshakeInfo()}) + self.send({"cmd": "handshake", "req_id": 0, "params": self.handshakeInfo()}) event_connected = self.event_connected gevent.spawn(self.messageLoop) - connect_res = event_connected.get() # Wait for handshake - self.sock.settimeout(timeout_before) - return connect_res + return event_connected.get() # Wait for handshake # Handle incoming connection def handleIncomingConnection(self, sock): self.log("Incoming connection...") - - if "TCP_NODELAY" in dir(socket): - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - self.type = "in" - if self.ip not in config.ip_local: # Clearnet: Check implicit SSL - try: - first_byte = sock.recv(1, gevent.socket.MSG_PEEK) - if first_byte == b"\x16": - self.log("Crypt in connection using implicit SSL") - self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa", True) - self.sock_wrapped = True - self.crypt = "tls-rsa" - except Exception as err: - self.log("Socket peek error: %s" % 
Debug.formatException(err)) + try: + if sock.recv(1, gevent.socket.MSG_PEEK) == "\x16": + self.log("Crypt in connection using implicit SSL") + self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa", True) + self.sock_wrapped = True + self.crypt = "tls-rsa" + except Exception, err: + self.log("Socket peek error: %s" % Debug.formatException(err)) self.messageLoop() - def getMsgpackUnpacker(self): - if self.handshake and self.handshake.get("use_bin_type"): - return Msgpack.getUnpacker(fallback=True, decode=False) - else: # Backward compatibility for <0.7.0 - return Msgpack.getUnpacker(fallback=True, decode=True) - # Message loop for connection def messageLoop(self): if not self.sock: @@ -213,243 +110,78 @@ class Connection(object): self.protocol = "v2" self.updateName() self.connected = True - buff_len = 0 - req_len = 0 - self.unpacker_bytes = 0 + self.unpacker = msgpack.Unpacker() try: - while not self.closed: - buff = self.sock.recv(64 * 1024) + while True: + buff = self.sock.recv(16 * 1024) if not buff: break # Connection closed - buff_len = len(buff) # Statistics self.last_recv_time = time.time() self.incomplete_buff_recv += 1 - self.bytes_recv += buff_len - self.server.bytes_recv += buff_len - req_len += buff_len + self.bytes_recv += len(buff) + self.server.bytes_recv += len(buff) if not self.unpacker: - self.unpacker = self.getMsgpackUnpacker() - self.unpacker_bytes = 0 - + self.unpacker = msgpack.Unpacker() self.unpacker.feed(buff) - self.unpacker_bytes += buff_len - - while True: - try: - message = next(self.unpacker) - except StopIteration: - break - if not type(message) is dict: - if config.debug_socket: - self.log("Invalid message type: %s, content: %r, buffer: %r" % (type(message), message, buff[0:16])) - raise Exception("Invalid message type: %s" % type(message)) - - # Stats + buff = None + for message in self.unpacker: self.incomplete_buff_recv = 0 - stat_key = message.get("cmd", "unknown") - if stat_key == "response" and "to" in message: - 
cmd_sent = self.waiting_requests.get(message["to"], {"cmd": "unknown"})["cmd"] - stat_key = "response: %s" % cmd_sent - if stat_key == "update": - stat_key = "update: %s" % message["params"]["site"] - self.server.stat_recv[stat_key]["bytes"] += req_len - self.server.stat_recv[stat_key]["num"] += 1 if "stream_bytes" in message: - self.server.stat_recv[stat_key]["bytes"] += message["stream_bytes"] - req_len = 0 - - # Handle message - if "stream_bytes" in message: - buff_left = self.handleStream(message, buff) - self.unpacker = self.getMsgpackUnpacker() - self.unpacker.feed(buff_left) - self.unpacker_bytes = len(buff_left) - if config.debug_socket: - self.log("Start new unpacker with buff_left: %r" % buff_left) + self.handleStream(message) else: self.handleMessage(message) message = None - except Exception as err: + except Exception, err: if not self.closed: self.log("Socket error: %s" % Debug.formatException(err)) - self.server.stat_recv["error: %s" % err]["bytes"] += req_len - self.server.stat_recv["error: %s" % err]["num"] += 1 - self.close("MessageLoop ended (closed: %s)" % self.closed) # MessageLoop ended, close connection - - def getUnpackerUnprocessedBytesNum(self): - if "tell" in dir(self.unpacker): - bytes_num = self.unpacker_bytes - self.unpacker.tell() - else: - bytes_num = self.unpacker._fb_buf_n - self.unpacker._fb_buf_o - return bytes_num - - # Stream socket directly to a file - def handleStream(self, message, buff): - stream_bytes_left = message["stream_bytes"] - file = self.waiting_streams[message["to"]] - - unprocessed_bytes_num = self.getUnpackerUnprocessedBytesNum() - - if unprocessed_bytes_num: # Found stream bytes in unpacker - unpacker_stream_bytes = min(unprocessed_bytes_num, stream_bytes_left) - buff_stream_start = len(buff) - unprocessed_bytes_num - file.write(buff[buff_stream_start:buff_stream_start + unpacker_stream_bytes]) - stream_bytes_left -= unpacker_stream_bytes - else: - unpacker_stream_bytes = 0 - - if config.debug_socket: - 
self.log( - "Starting stream %s: %s bytes (%s from unpacker, buff size: %s, unprocessed: %s)" % - (message["to"], message["stream_bytes"], unpacker_stream_bytes, len(buff), unprocessed_bytes_num) - ) - - try: - while 1: - if stream_bytes_left <= 0: - break - stream_buff = self.sock.recv(min(64 * 1024, stream_bytes_left)) - if not stream_buff: - break - buff_len = len(stream_buff) - stream_bytes_left -= buff_len - file.write(stream_buff) - - # Statistics - self.last_recv_time = time.time() - self.incomplete_buff_recv += 1 - self.bytes_recv += buff_len - self.server.bytes_recv += buff_len - except Exception as err: - self.log("Stream read error: %s" % Debug.formatException(err)) - - if config.debug_socket: - self.log("End stream %s, file pos: %s" % (message["to"], file.tell())) - - self.incomplete_buff_recv = 0 - self.waiting_requests[message["to"]]["evt"].set(message) # Set the response to event - del self.waiting_streams[message["to"]] - del self.waiting_requests[message["to"]] - - if unpacker_stream_bytes: - return buff[buff_stream_start + unpacker_stream_bytes:] - else: - return b"" + self.close() # MessageLoop ended, close connection # My handshake info - def getHandshakeInfo(self): - # No TLS for onion connections - if self.ip_type == "onion": - crypt_supported = [] - elif self.ip in self.server.broken_ssl_ips: - crypt_supported = [] - else: - crypt_supported = CryptConnection.manager.crypt_supported - # No peer id for onion connections - if self.ip_type == "onion" or self.ip in config.ip_local: - peer_id = "" - else: - peer_id = self.server.peer_id - # Setup peer lock from requested onion address - if self.handshake and self.handshake.get("target_ip", "").endswith(".onion") and self.server.tor_manager.start_onions: - self.target_onion = self.handshake.get("target_ip").replace(".onion", "") # My onion address - if not self.server.tor_manager.site_onions.values(): - self.server.log.warning("Unknown target onion address: %s" % self.target_onion) - - handshake = { 
+ def handshakeInfo(self): + return { "version": config.version, "protocol": "v2", - "use_bin_type": True, - "peer_id": peer_id, + "peer_id": self.server.peer_id, "fileserver_port": self.server.port, - "port_opened": self.server.port_opened.get(self.ip_type, None), - "target_ip": self.ip, + "port_opened": self.server.port_opened, "rev": config.rev, - "crypt_supported": crypt_supported, - "crypt": self.crypt, - "time": int(time.time()) + "crypt_supported": CryptConnection.manager.crypt_supported, + "crypt": self.crypt } - if self.target_onion: - handshake["onion"] = self.target_onion - elif self.ip_type == "onion": - handshake["onion"] = self.server.tor_manager.getOnion("global") - - if self.is_tracker_connection: - handshake["tracker_connection"] = True - - if config.debug_socket: - self.log("My Handshake: %s" % handshake) - - return handshake def setHandshake(self, handshake): - if config.debug_socket: - self.log("Remote Handshake: %s" % handshake) - - if handshake.get("peer_id") == self.server.peer_id and not handshake.get("tracker_connection") and not self.is_tracker_connection: - self.close("Same peer id, can't connect to myself") - self.server.peer_blacklist.append((handshake["target_ip"], handshake["fileserver_port"])) - return False - self.handshake = handshake - if handshake.get("port_opened", None) is False and "onion" not in handshake and not self.is_private_ip: # Not connectable + if handshake.get("port_opened", None) is False: # Not connectable self.port = 0 else: - self.port = int(handshake["fileserver_port"]) # Set peer fileserver port - - if handshake.get("use_bin_type") and self.unpacker: - unprocessed_bytes_num = self.getUnpackerUnprocessedBytesNum() - self.log("Changing unpacker to bin type (unprocessed bytes: %s)" % unprocessed_bytes_num) - unprocessed_bytes = self.unpacker.read_bytes(unprocessed_bytes_num) - self.unpacker = self.getMsgpackUnpacker() # Create new unpacker for different msgpack type - self.unpacker_bytes = 0 - if 
unprocessed_bytes: - self.unpacker.feed(unprocessed_bytes) + self.port = handshake["fileserver_port"] # Set peer fileserver port # Check if we can encrypt the connection - if handshake.get("crypt_supported") and self.ip not in self.server.broken_ssl_ips: - if type(handshake["crypt_supported"][0]) is bytes: - handshake["crypt_supported"] = [item.decode() for item in handshake["crypt_supported"]] # Backward compatibility - - if self.ip_type == "onion" or self.ip in config.ip_local: - crypt = None - elif handshake.get("crypt"): # Recommended crypt by server + if handshake.get("crypt_supported") and handshake["peer_id"] not in self.server.broken_ssl_peer_ids: + if handshake.get("crypt"): # Recommended crypt by server crypt = handshake["crypt"] else: # Select the best supported on both sides crypt = CryptConnection.manager.selectCrypt(handshake["crypt_supported"]) if crypt: self.crypt = crypt - - if self.type == "in" and handshake.get("onion") and not self.ip_type == "onion": # Set incoming connection's onion address - if self.server.ips.get(self.ip) == self: - del self.server.ips[self.ip] - self.setIp(handshake["onion"] + ".onion") - self.log("Changing ip to %s" % self.ip) - self.server.ips[self.ip] = self - self.updateName() - self.event_connected.set(True) # Mark handshake as done self.event_connected = None - self.handshake_time = time.time() # Handle incoming message def handleMessage(self, message): - cmd = message["cmd"] - self.last_message_time = time.time() - self.last_cmd_recv = cmd - if cmd == "response": # New style response + if message.get("cmd") == "response": # New style response if message["to"] in self.waiting_requests: - if self.last_send_time and len(self.waiting_requests) == 1: + if self.last_send_time: ping = time.time() - self.last_send_time self.last_ping_delay = ping - self.waiting_requests[message["to"]]["evt"].set(message) # Set the response to event + self.waiting_requests[message["to"]].set(message) # Set the response to event del 
self.waiting_requests[message["to"]] elif message["to"] == 0: # Other peers handshake ping = time.time() - self.start_time @@ -460,128 +192,140 @@ class Connection(object): if message.get("crypt") and not self.sock_wrapped: self.crypt = message["crypt"] server = (self.type == "in") - self.log("Crypt out connection using: %s (server side: %s, ping: %.3fs)..." % (self.crypt, server, ping)) - self.sock = CryptConnection.manager.wrapSocket(self.sock, self.crypt, server, cert_pin=self.cert_pin) + self.log("Crypt out connection using: %s (server side: %s)..." % (self.crypt, server)) + self.sock = CryptConnection.manager.wrapSocket(self.sock, self.crypt, server) self.sock.do_handshake() - self.sock_wrapped = True - - if not self.sock_wrapped and self.cert_pin: - self.close("Crypt connection error: Socket not encrypted, but certificate pin present") - return - self.setHandshake(message) else: self.log("Unknown response: %s" % message) - elif cmd: - self.server.num_recv += 1 - if cmd == "handshake": - self.handleHandshake(message) + elif message.get("cmd"): # Handhsake request + if message["cmd"] == "handshake": + if config.debug_socket: + self.log("Handshake request: %s" % message) + self.setHandshake(message["params"]) + data = self.handshakeInfo() + data["cmd"] = "response" + data["to"] = message["req_id"] + self.send(data) # Send response to handshake + # Sent crypt request to client + if self.crypt and not self.sock_wrapped: + server = (self.type == "in") + self.log("Crypt in connection using: %s (server side: %s)..." % (self.crypt, server)) + try: + self.sock = CryptConnection.manager.wrapSocket(self.sock, self.crypt, server) + self.sock_wrapped = True + except Exception, err: + self.log("Crypt connection error: %s, adding peerid %s as broken ssl." 
% (err, message["params"]["peer_id"])) + self.server.broken_ssl_peer_ids[message["params"]["peer_id"]] = True else: self.server.handleRequest(self, message) + else: # Old style response, no req_id definied + if config.debug_socket: + self.log("Old style response, waiting: %s" % self.waiting_requests.keys()) + last_req_id = min(self.waiting_requests.keys()) # Get the oldest waiting request and set it true + self.waiting_requests[last_req_id].set(message) + del self.waiting_requests[last_req_id] # Remove from waiting request - # Incoming handshake set request - def handleHandshake(self, message): - self.setHandshake(message["params"]) - data = self.getHandshakeInfo() - data["cmd"] = "response" - data["to"] = message["req_id"] - self.send(data) # Send response to handshake - # Sent crypt request to client - if self.crypt and not self.sock_wrapped: - server = (self.type == "in") - self.log("Crypt in connection using: %s (server side: %s)..." % (self.crypt, server)) - try: - self.sock = CryptConnection.manager.wrapSocket(self.sock, self.crypt, server, cert_pin=self.cert_pin) - self.sock_wrapped = True - except Exception as err: - if not config.force_encryption: - self.log("Crypt connection error, adding %s:%s as broken ssl. 
%s" % (self.ip, self.port, Debug.formatException(err))) - self.server.broken_ssl_ips[self.ip] = True - self.close("Broken ssl") + # Stream socket directly to a file + def handleStream(self, message): + if config.debug_socket: + self.log("Starting stream %s: %s bytes" % (message["to"], message["stream_bytes"])) - if not self.sock_wrapped and self.cert_pin: - self.close("Crypt connection error: Socket not encrypted, but certificate pin present") + read_bytes = message["stream_bytes"] # Bytes left we have to read from socket + try: + buff = self.unpacker.read_bytes(min(16 * 1024, read_bytes)) # Check if the unpacker has something left in buffer + except Exception, err: + buff = "" + file = self.waiting_streams[message["to"]] + if buff: + read_bytes -= len(buff) + file.write(buff) + + try: + while 1: + if read_bytes <= 0: + break + buff = self.sock.recv(16 * 1024) + if not buff: + break + buff_len = len(buff) + read_bytes -= buff_len + file.write(buff) + + # Statistics + self.last_recv_time = time.time() + self.incomplete_buff_recv += 1 + self.bytes_recv += buff_len + self.server.bytes_recv += buff_len + except Exception, err: + self.log("Stream read error: %s" % Debug.formatException(err)) + + if config.debug_socket: + self.log("End stream %s" % message["to"]) + + self.incomplete_buff_recv = 0 + self.waiting_requests[message["to"]].set(message) # Set the response to event + del self.waiting_streams[message["to"]] + del self.waiting_requests[message["to"]] # Send data to connection def send(self, message, streaming=False): - self.last_send_time = time.time() if config.debug_socket: self.log("Send: %s, to: %s, streaming: %s, site: %s, inner_path: %s, req_id: %s" % ( message.get("cmd"), message.get("to"), streaming, message.get("params", {}).get("site"), message.get("params", {}).get("inner_path"), message.get("req_id")) ) - - if not self.sock: - self.log("Send error: missing socket") - return False - - if not self.connected and message.get("cmd") != "handshake": - 
self.log("Wait for handshake before send request") - self.event_connected.get() - + self.last_send_time = time.time() try: - stat_key = message.get("cmd", "unknown") - if stat_key == "response": - stat_key = "response: %s" % self.last_cmd_recv - else: - self.server.num_sent += 1 - - self.server.stat_sent[stat_key]["num"] += 1 if streaming: - with self.send_lock: - bytes_sent = Msgpack.stream(message, self.sock.sendall) + bytes_sent = StreamingMsgpack.stream(message, self.sock.sendall) + message = None self.bytes_sent += bytes_sent self.server.bytes_sent += bytes_sent - self.server.stat_sent[stat_key]["bytes"] += bytes_sent - message = None else: - data = Msgpack.pack(message) + data = msgpack.packb(message) + message = None self.bytes_sent += len(data) self.server.bytes_sent += len(data) - self.server.stat_sent[stat_key]["bytes"] += len(data) - message = None - with self.send_lock: - self.sock.sendall(data) - except Exception as err: - self.close("Send error: %s (cmd: %s)" % (err, stat_key)) + self.sock.sendall(data) + except Exception, err: + self.log("Send errror: %s" % Debug.formatException(err)) + self.close() return False self.last_sent_time = time.time() return True - # Stream file to connection without msgpacking + # Stream raw file to connection def sendRawfile(self, file, read_bytes): buff = 64 * 1024 bytes_left = read_bytes - bytes_sent = 0 while True: self.last_send_time = time.time() - data = file.read(min(bytes_left, buff)) - bytes_sent += len(data) - with self.send_lock: - self.sock.sendall(data) + self.sock.sendall( + file.read(min(bytes_left, buff)) + ) bytes_left -= buff if bytes_left <= 0: break - self.bytes_sent += bytes_sent - self.server.bytes_sent += bytes_sent - self.server.stat_sent["raw_file"]["num"] += 1 - self.server.stat_sent["raw_file"]["bytes"] += bytes_sent + self.bytes_sent += read_bytes + self.server.bytes_sent += read_bytes return True # Create and send a request to peer def request(self, cmd, params={}, stream_to=None): # Last 
command sent more than 10 sec ago, timeout if self.waiting_requests and self.protocol == "v2" and time.time() - max(self.last_req_time, self.last_recv_time) > 10: - self.close("Request %s timeout: %.3fs" % (self.last_cmd_sent, time.time() - self.last_send_time)) + self.log("Request %s timeout: %s" % (self.last_cmd, time.time() - self.last_send_time)) + self.close() return False self.last_req_time = time.time() - self.last_cmd_sent = cmd + self.last_cmd = cmd self.req_id += 1 data = {"cmd": cmd, "req_id": self.req_id, "params": params} event = gevent.event.AsyncResult() # Create new event for response - self.waiting_requests[self.req_id] = {"evt": event, "cmd": cmd} + self.waiting_requests[self.req_id] = event if stream_to: self.waiting_streams[self.req_id] = stream_to self.send(data) # Send request @@ -594,16 +338,16 @@ class Connection(object): with gevent.Timeout(10.0, False): try: response = self.request("ping") - except Exception as err: + except Exception, err: self.log("Ping error: %s" % Debug.formatException(err)) - if response and "body" in response and response["body"] == b"Pong!": + if response and "body" in response and response["body"] == "Pong!": self.last_ping_delay = time.time() - s return True else: return False # Close connection - def close(self, reason="Unknown"): + def close(self): if self.closed: return False # Already closed self.closed = True @@ -611,21 +355,21 @@ class Connection(object): if self.event_connected: self.event_connected.set(False) - self.log( - "Closing connection: %s, waiting_requests: %s, sites: %s, buff: %s..." % - (reason, len(self.waiting_requests), self.sites, self.incomplete_buff_recv) - ) + if config.debug_socket: + self.log( + "Closing connection, waiting_requests: %s, buff: %s..." 
% + (len(self.waiting_requests), self.incomplete_buff_recv) + ) for request in self.waiting_requests.values(): # Mark pending requests failed - request["evt"].set(False) + request.set(False) self.waiting_requests = {} self.waiting_streams = {} - self.sites = 0 self.server.removeConnection(self) # Remove connection from server registry try: if self.sock: self.sock.shutdown(gevent.socket.SHUT_WR) self.sock.close() - except Exception as err: + except Exception, err: if config.debug_socket: self.log("Close error: %s" % err) diff --git a/src/Connection/ConnectionServer.py b/src/Connection/ConnectionServer.py index c9048398..72c53c83 100644 --- a/src/Connection/ConnectionServer.py +++ b/src/Connection/ConnectionServer.py @@ -1,153 +1,82 @@ import logging +import random +import string import time import sys -import socket -from collections import defaultdict import gevent import msgpack from gevent.server import StreamServer from gevent.pool import Pool -import util -from util import helper from Debug import Debug -from .Connection import Connection +from Connection import Connection from Config import config from Crypt import CryptConnection from Crypt import CryptHash -from Tor import TorManager -from Site import SiteManager -class ConnectionServer(object): +class ConnectionServer: def __init__(self, ip=None, port=None, request_handler=None): - if not ip: - if config.fileserver_ip_type == "ipv6": - ip = "::1" - else: - ip = "127.0.0.1" - port = 15441 self.ip = ip self.port = port - self.last_connection_id = 0 # Connection id incrementer - self.last_connection_id_current_version = 0 # Connection id incrementer for current client version - self.last_connection_id_supported_version = 0 # Connection id incrementer for last supported version + self.last_connection_id = 1 # Connection id incrementer self.log = logging.getLogger("ConnServer") - self.port_opened = {} - self.peer_blacklist = SiteManager.peer_blacklist + self.port_opened = None - self.tor_manager = 
TorManager(self.ip, self.port) self.connections = [] # Connections - self.whitelist = config.ip_local # No flood protection on this ips self.ip_incoming = {} # Incoming connections from ip in the last minute to avoid connection flood - self.broken_ssl_ips = {} # Peerids of broken ssl connections + self.broken_ssl_peer_ids = {} # Peerids of broken ssl connections self.ips = {} # Connection by ip - self.has_internet = True # Internet outage detection - self.stream_server = None - self.stream_server_proxy = None - self.running = False - self.stopping = False - self.thread_checker = None + self.running = True + self.thread_checker = gevent.spawn(self.checkConnections) - self.stat_recv = defaultdict(lambda: defaultdict(int)) - self.stat_sent = defaultdict(lambda: defaultdict(int)) self.bytes_recv = 0 self.bytes_sent = 0 - self.num_recv = 0 - self.num_sent = 0 - - self.num_incoming = 0 - self.num_outgoing = 0 - self.had_external_incoming = False - - self.timecorrection = 0.0 - self.pool = Pool(500) # do not accept more than 500 connections # Bittorrent style peerid - self.peer_id = "-UT3530-%s" % CryptHash.random(12, "base64") + self.peer_id = "-ZN0%s-%s" % (config.version.replace(".", ""), CryptHash.random(12, "base64")) # Check msgpack version if msgpack.version[0] == 0 and msgpack.version[1] < 4: self.log.error( - "Error: Unsupported msgpack version: %s (<0.4.0), please run `sudo apt-get install python-pip; sudo pip install msgpack --upgrade`" % + "Error: Unsupported msgpack version: %s (<0.4.0), please run `sudo pip install msgpack-python --upgrade`" % str(msgpack.version) ) sys.exit(0) - if request_handler: - self.handleRequest = request_handler - - def start(self, check_connections=True): - if self.stopping: - return False - self.running = True - if check_connections: - self.thread_checker = gevent.spawn(self.checkConnections) - CryptConnection.manager.loadCerts() - if config.tor != "disable": - self.tor_manager.start() - if not self.port: - self.log.info("No port 
found, not binding") - return False - - self.log.debug("Binding to: %s:%s, (msgpack: %s), supported crypt: %s" % ( - self.ip, self.port, ".".join(map(str, msgpack.version)), - CryptConnection.manager.crypt_supported - )) - try: + if port: # Listen server on a port + self.pool = Pool(1000) # do not accept more than 1000 connections self.stream_server = StreamServer( - (self.ip, self.port), self.handleIncomingConnection, spawn=self.pool, backlog=100 + (ip.replace("*", ""), port), self.handleIncomingConnection, spawn=self.pool, backlog=100 ) - except Exception as err: - self.log.info("StreamServer create error: %s" % Debug.formatException(err)) + if request_handler: + self.handleRequest = request_handler - def listen(self): - if not self.running: - return None - - if self.stream_server_proxy: - gevent.spawn(self.listenProxy) + def start(self): + self.running = True + CryptConnection.manager.loadCerts() + self.log.debug("Binding to: %s:%s, (msgpack: %s), supported crypt: %s" % ( + self.ip, self.port, + ".".join(map(str, msgpack.version)), CryptConnection.manager.crypt_supported) + ) try: - self.stream_server.serve_forever() - except Exception as err: - self.log.info("StreamServer listen error: %s" % err) - return False - self.log.debug("Stopped.") + self.stream_server.serve_forever() # Start normal connection server + except Exception, err: + self.log.info("StreamServer bind error, must be running already: %s" % err) def stop(self): - self.log.debug("Stopping %s" % self.stream_server) - self.stopping = True self.running = False - if self.thread_checker: - gevent.kill(self.thread_checker) - if self.stream_server: - self.stream_server.stop() - - def closeConnections(self): - self.log.debug("Closing all connection: %s" % len(self.connections)) - for connection in self.connections[:]: - connection.close("Close all connections") + self.stream_server.stop() def handleIncomingConnection(self, sock, addr): - if config.offline: - sock.close() - return False - - ip, port = 
addr[0:2] - ip = ip.lower() - if ip.startswith("::ffff:"): # IPv6 to IPv4 mapping - ip = ip.replace("::ffff:", "", 1) - self.num_incoming += 1 - - if not self.had_external_incoming and not helper.isPrivateIp(ip): - self.had_external_incoming = True + ip, port = addr # Connection flood protection - if ip in self.ip_incoming and ip not in self.whitelist: + if ip in self.ip_incoming: self.ip_incoming[ip] += 1 - if self.ip_incoming[ip] > 6: # Allow 6 in 1 minute from same ip + if self.ip_incoming[ip] > 3: # Allow 3 in 1 minute from same ip self.log.debug("Connection flood detected from %s" % ip) time.sleep(30) sock.close() @@ -157,33 +86,13 @@ class ConnectionServer(object): connection = Connection(self, ip, port, sock) self.connections.append(connection) - rev = connection.handshake.get("rev", 0) - if rev >= 4560: - self.last_connection_id_supported_version += 1 - if rev == config.rev: - self.last_connection_id_current_version += 1 - if ip not in config.ip_local: - self.ips[ip] = connection + self.ips[ip] = connection connection.handleIncomingConnection(sock) - def handleMessage(self, *args, **kwargs): - pass - - def getConnection(self, ip=None, port=None, peer_id=None, create=True, site=None, is_tracker_connection=False): - ip_type = helper.getIpType(ip) - has_per_site_onion = (ip.endswith(".onion") or self.port_opened.get(ip_type, None) == False) and self.tor_manager.start_onions and site - if has_per_site_onion: # Site-unique connection for Tor - if ip.endswith(".onion"): - site_onion = self.tor_manager.getOnion(site.address) - else: - site_onion = self.tor_manager.getOnion("global") - key = ip + site_onion - else: - key = ip - + def getConnection(self, ip=None, port=None, peer_id=None, create=True): # Find connection by ip - if key in self.ips: - connection = self.ips[key] + if ip in self.ips: + connection = self.ips[ip] if not peer_id or connection.handshake.get("peer_id") == peer_id: # Filter by peer_id if not connection.connected and create: succ = 
connection.event_connected.get() # Wait for connection @@ -191,196 +100,81 @@ class ConnectionServer(object): raise Exception("Connection event return error") return connection - # Recover from connection pool - for connection in self.connections: - if connection.ip == ip: - if peer_id and connection.handshake.get("peer_id") != peer_id: # Does not match - continue - if ip.endswith(".onion") and self.tor_manager.start_onions and ip.replace(".onion", "") != connection.target_onion: - # For different site - continue - if not connection.connected and create: - succ = connection.event_connected.get() # Wait for connection - if not succ: - raise Exception("Connection event return error") - return connection + # Recover from connection pool + for connection in self.connections: + if connection.ip == ip: + if peer_id and connection.handshake.get("peer_id") != peer_id: # Does not match + continue + if not connection.connected and create: + succ = connection.event_connected.get() # Wait for connection + if not succ: + raise Exception("Connection event return error") + return connection # No connection found - if create and not config.offline: # Allow to create new connection if not found + if create: # Allow to create new connection if not found if port == 0: raise Exception("This peer is not connectable") - - if (ip, port) in self.peer_blacklist and not is_tracker_connection: - raise Exception("This peer is blacklisted") - try: - if has_per_site_onion: # Lock connection to site - connection = Connection(self, ip, port, target_onion=site_onion, is_tracker_connection=is_tracker_connection) - else: - connection = Connection(self, ip, port, is_tracker_connection=is_tracker_connection) - self.num_outgoing += 1 - self.ips[key] = connection + connection = Connection(self, ip, port) + self.ips[ip] = connection self.connections.append(connection) - connection.log("Connecting... 
(site: %s)" % site) succ = connection.connect() if not succ: - connection.close("Connection event return error") + connection.close() raise Exception("Connection event return error") - else: - rev = connection.handshake.get("rev", 0) - if rev >= 4560: - self.last_connection_id_supported_version += 1 - if rev == config.rev: - self.last_connection_id_current_version += 1 - except Exception as err: - connection.close("%s Connect error: %s" % (ip, Debug.formatException(err))) + except Exception, err: + self.log.debug("%s Connect error: %s" % (ip, Debug.formatException(err))) + connection.close() raise err - - if len(self.connections) > config.global_connected_limit: - gevent.spawn(self.checkMaxConnections) - return connection else: return None def removeConnection(self, connection): - # Delete if same as in registry - if self.ips.get(connection.ip) == connection: + self.log.debug("Removing %s..." % connection) + if self.ips.get(connection.ip) == connection: # Delete if same as in registry del self.ips[connection.ip] - # Site locked connection - if connection.target_onion: - if self.ips.get(connection.ip + connection.target_onion) == connection: - del self.ips[connection.ip + connection.target_onion] - # Cert pinned connection - if connection.cert_pin and self.ips.get(connection.ip + "#" + connection.cert_pin) == connection: - del self.ips[connection.ip + "#" + connection.cert_pin] - if connection in self.connections: self.connections.remove(connection) def checkConnections(self): - run_i = 0 - time.sleep(15) while self.running: - run_i += 1 + time.sleep(60) # Sleep 1 min self.ip_incoming = {} # Reset connected ips counter - last_message_time = 0 - s = time.time() + self.broken_ssl_peer_ids = {} # Reset broken ssl peerids count for connection in self.connections[:]: # Make a copy - if connection.ip.endswith(".onion") or config.tor == "always": - timeout_multipler = 2 - else: - timeout_multipler = 1 - idle = time.time() - max(connection.last_recv_time, 
connection.start_time, connection.last_message_time) - if connection.last_message_time > last_message_time and not connection.is_private_ip: - # Message from local IPs does not means internet connection - last_message_time = connection.last_message_time if connection.unpacker and idle > 30: # Delete the unpacker if not needed del connection.unpacker connection.unpacker = None - - elif connection.last_cmd_sent == "announce" and idle > 20: # Bootstrapper connection close after 20 sec - connection.close("[Cleanup] Tracker connection, idle: %.3fs" % idle) + connection.log("Unpacker deleted") if idle > 60 * 60: # Wake up after 1h - connection.close("[Cleanup] After wakeup, idle: %.3fs" % idle) + connection.log("[Cleanup] After wakeup, idle: %s" % idle) + connection.close() elif idle > 20 * 60 and connection.last_send_time < time.time() - 10: - # Idle more than 20 min and we have not sent request in last 10 sec - if not connection.ping(): - connection.close("[Cleanup] Ping timeout") + # Idle more than 20 min and we not send request in last 10 sec + if not connection.ping(): # send ping request + connection.close() - elif idle > 10 * timeout_multipler and connection.incomplete_buff_recv > 0: - # Incomplete data with more than 10 sec idle - connection.close("[Cleanup] Connection buff stalled") + elif idle > 10 and connection.incomplete_buff_recv > 0: + # Incompelte data with more than 10 sec idle + connection.log("[Cleanup] Connection buff stalled") + connection.close() - elif idle > 10 * timeout_multipler and connection.protocol == "?": # No connection after 10 sec - connection.close( - "[Cleanup] Connect timeout: %.3fs" % idle - ) - - elif idle > 10 * timeout_multipler and connection.waiting_requests and time.time() - connection.last_send_time > 10 * timeout_multipler: + elif idle > 10 and connection.waiting_requests and time.time() - connection.last_send_time > 10: # Sent command and no response in 10 sec - connection.close( - "[Cleanup] Command %s timeout: %.3fs" % 
(connection.last_cmd_sent, time.time() - connection.last_send_time) + connection.log( + "[Cleanup] Command %s timeout: %s" % (connection.last_cmd, time.time() - connection.last_send_time) ) + connection.close() - elif idle < 60 and connection.bad_actions > 40: - connection.close( - "[Cleanup] Too many bad actions: %s" % connection.bad_actions - ) - - elif idle > 5 * 60 and connection.sites == 0: - connection.close( - "[Cleanup] No site for connection" - ) - - elif run_i % 90 == 0: - # Reset bad action counter every 30 min - connection.bad_actions = 0 - - # Internet outage detection - if time.time() - last_message_time > max(60, 60 * 10 / max(1, float(len(self.connections)) / 50)): - # Offline: Last message more than 60-600sec depending on connection number - if self.has_internet and last_message_time: - self.has_internet = False - self.onInternetOffline() - else: - # Online - if not self.has_internet: - self.has_internet = True - self.onInternetOnline() - - self.timecorrection = self.getTimecorrection() - - if time.time() - s > 0.01: - self.log.debug("Connection cleanup in %.3fs" % (time.time() - s)) - - time.sleep(15) - self.log.debug("Checkconnections ended") - - @util.Noparallel(blocking=False) - def checkMaxConnections(self): - if len(self.connections) < config.global_connected_limit: - return 0 - - s = time.time() - num_connected_before = len(self.connections) - self.connections.sort(key=lambda connection: connection.sites) - num_closed = 0 - for connection in self.connections: - idle = time.time() - max(connection.last_recv_time, connection.start_time, connection.last_message_time) - if idle > 60: - connection.close("Connection limit reached") - num_closed += 1 - if num_closed > config.global_connected_limit * 0.1: - break - - self.log.debug("Closed %s connections of %s after reached limit %s in %.3fs" % ( - num_closed, num_connected_before, config.global_connected_limit, time.time() - s - )) - return num_closed - - def onInternetOnline(self): - 
self.log.info("Internet online") - - def onInternetOffline(self): - self.had_external_incoming = False - self.log.info("Internet offline") - - def getTimecorrection(self): - corrections = sorted([ - connection.handshake.get("time") - connection.handshake_time + connection.last_ping_delay - for connection in self.connections - if connection.handshake.get("time") and connection.last_ping_delay - ]) - if len(corrections) < 9: - return 0.0 - mid = int(len(corrections) / 2 - 1) - median = (corrections[mid - 1] + corrections[mid] + corrections[mid + 1]) / 3 - return median + elif idle > 60 and connection.protocol == "?": # No connection after 1 min + connection.log("[Cleanup] Connect timeout: %s" % idle) + connection.close() diff --git a/src/Connection/__init__.py b/src/Connection/__init__.py index d419a3f0..5bd29c6e 100644 --- a/src/Connection/__init__.py +++ b/src/Connection/__init__.py @@ -1,2 +1,2 @@ -from .ConnectionServer import ConnectionServer -from .Connection import Connection +from ConnectionServer import ConnectionServer +from Connection import Connection diff --git a/src/Content/ContentDb.py b/src/Content/ContentDb.py deleted file mode 100644 index f284581e..00000000 --- a/src/Content/ContentDb.py +++ /dev/null @@ -1,162 +0,0 @@ -import os - -from Db.Db import Db, DbTableError -from Config import config -from Plugin import PluginManager -from Debug import Debug - - -@PluginManager.acceptPlugins -class ContentDb(Db): - def __init__(self, path): - Db.__init__(self, {"db_name": "ContentDb", "tables": {}}, path) - self.foreign_keys = True - - def init(self): - try: - self.schema = self.getSchema() - try: - self.checkTables() - except DbTableError: - pass - self.log.debug("Checking foreign keys...") - foreign_key_error = self.execute("PRAGMA foreign_key_check").fetchone() - if foreign_key_error: - raise Exception("Database foreign key error: %s" % foreign_key_error) - except Exception as err: - self.log.error("Error loading content.db: %s, rebuilding..." 
% Debug.formatException(err)) - self.close() - os.unlink(self.db_path) # Remove and try again - Db.__init__(self, {"db_name": "ContentDb", "tables": {}}, self.db_path) - self.foreign_keys = True - self.schema = self.getSchema() - try: - self.checkTables() - except DbTableError: - pass - self.site_ids = {} - self.sites = {} - - def getSchema(self): - schema = {} - schema["db_name"] = "ContentDb" - schema["version"] = 3 - schema["tables"] = {} - - if not self.getTableVersion("site"): - self.log.debug("Migrating from table version-less content.db") - version = int(self.execute("PRAGMA user_version").fetchone()[0]) - if version > 0: - self.checkTables() - self.execute("INSERT INTO keyvalue ?", {"json_id": 0, "key": "table.site.version", "value": 1}) - self.execute("INSERT INTO keyvalue ?", {"json_id": 0, "key": "table.content.version", "value": 1}) - - schema["tables"]["site"] = { - "cols": [ - ["site_id", "INTEGER PRIMARY KEY ASC NOT NULL UNIQUE"], - ["address", "TEXT NOT NULL"] - ], - "indexes": [ - "CREATE UNIQUE INDEX site_address ON site (address)" - ], - "schema_changed": 1 - } - - schema["tables"]["content"] = { - "cols": [ - ["content_id", "INTEGER PRIMARY KEY UNIQUE NOT NULL"], - ["site_id", "INTEGER REFERENCES site (site_id) ON DELETE CASCADE"], - ["inner_path", "TEXT"], - ["size", "INTEGER"], - ["size_files", "INTEGER"], - ["size_files_optional", "INTEGER"], - ["modified", "INTEGER"] - ], - "indexes": [ - "CREATE UNIQUE INDEX content_key ON content (site_id, inner_path)", - "CREATE INDEX content_modified ON content (site_id, modified)" - ], - "schema_changed": 1 - } - - return schema - - def initSite(self, site): - self.sites[site.address] = site - - def needSite(self, site): - if site.address not in self.site_ids: - self.execute("INSERT OR IGNORE INTO site ?", {"address": site.address}) - self.site_ids = {} - for row in self.execute("SELECT * FROM site"): - self.site_ids[row["address"]] = row["site_id"] - return self.site_ids[site.address] - - def 
deleteSite(self, site): - site_id = self.site_ids.get(site.address, 0) - if site_id: - self.execute("DELETE FROM site WHERE site_id = :site_id", {"site_id": site_id}) - del self.site_ids[site.address] - del self.sites[site.address] - - def setContent(self, site, inner_path, content, size=0): - self.insertOrUpdate("content", { - "size": size, - "size_files": sum([val["size"] for key, val in content.get("files", {}).items()]), - "size_files_optional": sum([val["size"] for key, val in content.get("files_optional", {}).items()]), - "modified": int(content.get("modified", 0)) - }, { - "site_id": self.site_ids.get(site.address, 0), - "inner_path": inner_path - }) - - def deleteContent(self, site, inner_path): - self.execute("DELETE FROM content WHERE ?", {"site_id": self.site_ids.get(site.address, 0), "inner_path": inner_path}) - - def loadDbDict(self, site): - res = self.execute( - "SELECT GROUP_CONCAT(inner_path, '|') AS inner_paths FROM content WHERE ?", - {"site_id": self.site_ids.get(site.address, 0)} - ) - row = res.fetchone() - if row and row["inner_paths"]: - inner_paths = row["inner_paths"].split("|") - return dict.fromkeys(inner_paths, False) - else: - return {} - - def getTotalSize(self, site, ignore=None): - params = {"site_id": self.site_ids.get(site.address, 0)} - if ignore: - params["not__inner_path"] = ignore - res = self.execute("SELECT SUM(size) + SUM(size_files) AS size, SUM(size_files_optional) AS size_optional FROM content WHERE ?", params) - row = dict(res.fetchone()) - - if not row["size"]: - row["size"] = 0 - if not row["size_optional"]: - row["size_optional"] = 0 - - return row["size"], row["size_optional"] - - def listModified(self, site, after=None, before=None): - params = {"site_id": self.site_ids.get(site.address, 0)} - if after: - params["modified>"] = after - if before: - params["modified<"] = before - res = self.execute("SELECT inner_path, modified FROM content WHERE ?", params) - return {row["inner_path"]: row["modified"] for row in res} 
- -content_dbs = {} - - -def getContentDb(path=None): - if not path: - path = "%s/content.db" % config.data_dir - if path not in content_dbs: - content_dbs[path] = ContentDb(path) - content_dbs[path].init() - return content_dbs[path] - -getContentDb() # Pre-connect to default one diff --git a/src/Content/ContentDbDict.py b/src/Content/ContentDbDict.py deleted file mode 100644 index 01df0427..00000000 --- a/src/Content/ContentDbDict.py +++ /dev/null @@ -1,155 +0,0 @@ -import time -import os - -from . import ContentDb -from Debug import Debug -from Config import config - - -class ContentDbDict(dict): - def __init__(self, site, *args, **kwargs): - s = time.time() - self.site = site - self.cached_keys = [] - self.log = self.site.log - self.db = ContentDb.getContentDb() - self.db_id = self.db.needSite(site) - self.num_loaded = 0 - super(ContentDbDict, self).__init__(self.db.loadDbDict(site)) # Load keys from database - self.log.debug("ContentDb init: %.3fs, found files: %s, sites: %s" % (time.time() - s, len(self), len(self.db.site_ids))) - - def loadItem(self, key): - try: - self.num_loaded += 1 - if self.num_loaded % 100 == 0: - if config.verbose: - self.log.debug("Loaded json: %s (latest: %s) called by: %s" % (self.num_loaded, key, Debug.formatStack())) - else: - self.log.debug("Loaded json: %s (latest: %s)" % (self.num_loaded, key)) - content = self.site.storage.loadJson(key) - dict.__setitem__(self, key, content) - except IOError: - if dict.get(self, key): - self.__delitem__(key) # File not exists anymore - raise KeyError(key) - - self.addCachedKey(key) - self.checkLimit() - - return content - - def getItemSize(self, key): - return self.site.storage.getSize(key) - - # Only keep last 10 accessed json in memory - def checkLimit(self): - if len(self.cached_keys) > 10: - key_deleted = self.cached_keys.pop(0) - dict.__setitem__(self, key_deleted, False) - - def addCachedKey(self, key): - if key not in self.cached_keys and key != "content.json" and len(key) > 40: # 
Always keep keys smaller than 40 char - self.cached_keys.append(key) - - def __getitem__(self, key): - val = dict.get(self, key) - if val: # Already loaded - return val - elif val is None: # Unknown key - raise KeyError(key) - elif val is False: # Loaded before, but purged from cache - return self.loadItem(key) - - def __setitem__(self, key, val): - self.addCachedKey(key) - self.checkLimit() - size = self.getItemSize(key) - self.db.setContent(self.site, key, val, size) - dict.__setitem__(self, key, val) - - def __delitem__(self, key): - self.db.deleteContent(self.site, key) - dict.__delitem__(self, key) - try: - self.cached_keys.remove(key) - except ValueError: - pass - - def iteritems(self): - for key in dict.keys(self): - try: - val = self[key] - except Exception as err: - self.log.warning("Error loading %s: %s" % (key, err)) - continue - yield key, val - - def items(self): - back = [] - for key in dict.keys(self): - try: - val = self[key] - except Exception as err: - self.log.warning("Error loading %s: %s" % (key, err)) - continue - back.append((key, val)) - return back - - def values(self): - back = [] - for key, val in dict.iteritems(self): - if not val: - try: - val = self.loadItem(key) - except Exception: - continue - back.append(val) - return back - - def get(self, key, default=None): - try: - return self.__getitem__(key) - except KeyError: - return default - except Exception as err: - self.site.bad_files[key] = self.site.bad_files.get(key, 1) - dict.__delitem__(self, key) - self.log.warning("Error loading %s: %s" % (key, err)) - return default - - def execute(self, query, params={}): - params["site_id"] = self.db_id - return self.db.execute(query, params) - -if __name__ == "__main__": - import psutil - process = psutil.Process(os.getpid()) - s_mem = process.memory_info()[0] / float(2 ** 20) - root = "data-live/1MaiL5gfBM1cyb4a8e3iiL8L5gXmoAJu27" - contents = ContentDbDict("1MaiL5gfBM1cyb4a8e3iiL8L5gXmoAJu27", root) - print("Init len", len(contents)) - - s 
= time.time() - for dir_name in os.listdir(root + "/data/users/")[0:8000]: - contents["data/users/%s/content.json" % dir_name] - print("Load: %.3fs" % (time.time() - s)) - - s = time.time() - found = 0 - for key, val in contents.items(): - found += 1 - assert key - assert val - print("Found:", found) - print("Iteritem: %.3fs" % (time.time() - s)) - - s = time.time() - found = 0 - for key in list(contents.keys()): - found += 1 - assert key in contents - print("In: %.3fs" % (time.time() - s)) - - print("Len:", len(list(contents.values())), len(list(contents.keys()))) - - print("Mem: +", process.memory_info()[0] / float(2 ** 20) - s_mem) diff --git a/src/Content/ContentManager.py b/src/Content/ContentManager.py index 623cc707..df400b99 100644 --- a/src/Content/ContentManager.py +++ b/src/Content/ContentManager.py @@ -3,8 +3,6 @@ import time import re import os import copy -import base64 -import sys import gevent @@ -12,65 +10,24 @@ from Debug import Debug from Crypt import CryptHash from Config import config from util import helper -from util import Diff -from util import SafeRe from Peer import PeerHashfield -from .ContentDbDict import ContentDbDict -from Plugin import PluginManager -class VerifyError(Exception): - pass - - -class SignError(Exception): - pass - - -@PluginManager.acceptPlugins class ContentManager(object): def __init__(self, site): self.site = site self.log = self.site.log - self.contents = ContentDbDict(site) + self.contents = {} # Known content.json (without files and includes) self.hashfield = PeerHashfield() - self.has_optional_files = False - - # Load all content.json files - def loadContents(self): - if len(self.contents) == 0: - self.log.info("ContentDb not initialized, load files from filesystem...") - self.loadContent(add_bad_files=False, delete_removed_files=False) - self.site.settings["size"], self.site.settings["size_optional"] = self.getTotalSize() - - # Load hashfield cache - if "hashfield" in self.site.settings.get("cache", {}): - 
self.hashfield.frombytes(base64.b64decode(self.site.settings["cache"]["hashfield"])) - del self.site.settings["cache"]["hashfield"] - elif self.contents.get("content.json") and self.site.settings["size_optional"] > 0: - self.site.storage.updateBadFiles() # No hashfield cache created yet - self.has_optional_files = bool(self.hashfield) - - self.contents.db.initSite(self.site) - - def getFileChanges(self, old_files, new_files): - deleted = {key: val for key, val in old_files.items() if key not in new_files} - deleted_hashes = {val.get("sha512"): key for key, val in old_files.items() if key not in new_files} - added = {key: val for key, val in new_files.items() if key not in old_files} - renamed = {} - for relative_path, node in added.items(): - hash = node.get("sha512") - if hash in deleted_hashes: - relative_path_old = deleted_hashes[hash] - renamed[relative_path_old] = relative_path - del(deleted[relative_path_old]) - return list(deleted), renamed + self.site.onFileDone.append(lambda inner_path: self.addOptionalFile(inner_path)) + self.loadContent(add_bad_files=False, delete_removed_files=False) + self.site.settings["size"] = self.getTotalSize() # Load content.json to self.content # Return: Changed files ["index.html", "data/messages.json"], Deleted files ["old.jpg"] def loadContent(self, content_inner_path="content.json", add_bad_files=True, delete_removed_files=True, load_includes=True, force=False): - content_inner_path = content_inner_path.strip("/") # Remove / from beginning + content_inner_path = content_inner_path.strip("/") # Remove / from begning old_content = self.contents.get(content_inner_path) content_path = self.site.storage.getPath(content_inner_path) content_dir = helper.getDirname(self.site.storage.getPath(content_inner_path)) @@ -83,17 +40,17 @@ class ContentManager(object): for line in open(content_path): if '"modified"' not in line: continue - match = re.search(r"([0-9\.]+),$", line.strip(" \r\n")) + match = re.search("([0-9\.]+),$", 
line.strip(" \r\n")) if match and float(match.group(1)) <= old_content.get("modified", 0): - self.log.debug("%s loadContent same json file, skipping" % content_inner_path) + self.log.debug("loadContent same json file, skipping") return [], [] - new_content = self.site.storage.loadJson(content_inner_path) - except Exception as err: - self.log.warning("%s load error: %s" % (content_path, Debug.formatException(err))) + new_content = json.load(open(content_path)) + except Exception, err: + self.log.error("%s load error: %s" % (content_path, Debug.formatException(err))) return [], [] else: - self.log.debug("Content.json not exist: %s" % content_path) + self.log.error("Content.json not exist: %s" % content_path) return [], [] # Content.json not exist try: @@ -101,10 +58,10 @@ class ContentManager(object): changed = [] deleted = [] # Check changed - for relative_path, info in new_content.get("files", {}).items(): + for relative_path, info in new_content.get("files", {}).iteritems(): if "sha512" in info: hash_type = "sha512" - else: # Backward compatibility + else: # Backward compatiblity hash_type = "sha1" new_hash = info[hash_type] @@ -116,25 +73,22 @@ class ContentManager(object): changed.append(content_inner_dir + relative_path) # Check changed optional files - for relative_path, info in new_content.get("files_optional", {}).items(): + for relative_path, info in new_content.get("files_optional", {}).iteritems(): file_inner_path = content_inner_dir + relative_path new_hash = info["sha512"] - if old_content and old_content.get("files_optional", {}).get(relative_path): - # We have the file in the old content + if old_content and old_content.get("files_optional", {}).get(relative_path): # We have the file in the old content old_hash = old_content["files_optional"][relative_path].get("sha512") - if old_hash != new_hash and self.site.isDownloadable(file_inner_path): - changed.append(file_inner_path) # Download new file - elif old_hash != new_hash and 
self.hashfield.hasHash(old_hash) and not self.site.settings.get("own"): + if old_hash != new_hash and self.site.settings.get("autodownloadoptional"): + changed.append(content_inner_dir + relative_path) # Download new file + elif old_hash != new_hash and not self.site.settings.get("own"): try: - old_hash_id = self.hashfield.getHashId(old_hash) - self.optionalRemoved(file_inner_path, old_hash_id, old_content["files_optional"][relative_path]["size"]) - self.optionalDelete(file_inner_path) + self.site.storage.delete(file_inner_path) self.log.debug("Deleted changed optional file: %s" % file_inner_path) - except Exception as err: - self.log.warning("Error deleting file %s: %s" % (file_inner_path, Debug.formatException(err))) + except Exception, err: + self.log.debug("Error deleting file %s: %s" % (file_inner_path, err)) else: # The file is not in the old content - if self.site.isDownloadable(file_inner_path): - changed.append(file_inner_path) # Download new file + if self.site.settings.get("autodownloadoptional"): + changed.append(content_inner_dir + relative_path) # Download new file # Check deleted if old_content: @@ -148,104 +102,19 @@ class ContentManager(object): **new_content.get("files_optional", {}) ) - deleted, renamed = self.getFileChanges(old_files, new_files) - - for relative_path_old, relative_path_new in renamed.items(): - self.log.debug("Renaming: %s -> %s" % (relative_path_old, relative_path_new)) - if relative_path_new in new_content.get("files_optional", {}): - self.optionalRenamed(content_inner_dir + relative_path_old, content_inner_dir + relative_path_new) - if self.site.storage.isFile(relative_path_old): - try: - self.site.storage.rename(relative_path_old, relative_path_new) - if relative_path_new in changed: - changed.remove(relative_path_new) - self.log.debug("Renamed: %s -> %s" % (relative_path_old, relative_path_new)) - except Exception as err: - self.log.warning("Error renaming file: %s -> %s %s" % (relative_path_old, relative_path_new, err)) - 
+ deleted = [content_inner_dir + key for key in old_files if key not in new_files] if deleted and not self.site.settings.get("own"): # Deleting files that no longer in content.json - for file_relative_path in deleted: - file_inner_path = content_inner_dir + file_relative_path + for file_inner_path in deleted: try: - # Check if the deleted file is optional - if old_content.get("files_optional") and old_content["files_optional"].get(file_relative_path): - self.optionalDelete(file_inner_path) - old_hash = old_content["files_optional"][file_relative_path].get("sha512") - if self.hashfield.hasHash(old_hash): - old_hash_id = self.hashfield.getHashId(old_hash) - self.optionalRemoved(file_inner_path, old_hash_id, old_content["files_optional"][file_relative_path]["size"]) - else: - self.site.storage.delete(file_inner_path) - + self.site.storage.delete(file_inner_path) self.log.debug("Deleted file: %s" % file_inner_path) - except Exception as err: - self.log.debug("Error deleting file %s: %s" % (file_inner_path, Debug.formatException(err))) - - # Cleanup empty dirs - tree = {root: [dirs, files] for root, dirs, files in os.walk(self.site.storage.getPath(content_inner_dir))} - for root in sorted(tree, key=len, reverse=True): - dirs, files = tree[root] - if dirs == [] and files == []: - root_inner_path = self.site.storage.getInnerPath(root.replace("\\", "/")) - self.log.debug("Empty directory: %s, cleaning up." 
% root_inner_path) - try: - self.site.storage.deleteDir(root_inner_path) - # Remove from tree dict to reflect changed state - tree[os.path.dirname(root)][0].remove(os.path.basename(root)) - except Exception as err: - self.log.debug("Error deleting empty directory %s: %s" % (root_inner_path, err)) - - # Check archived - if old_content and "user_contents" in new_content and "archived" in new_content["user_contents"]: - old_archived = old_content.get("user_contents", {}).get("archived", {}) - new_archived = new_content.get("user_contents", {}).get("archived", {}) - self.log.debug("old archived: %s, new archived: %s" % (len(old_archived), len(new_archived))) - archived_changed = { - key: date_archived - for key, date_archived in new_archived.items() - if old_archived.get(key) != new_archived[key] - } - if archived_changed: - self.log.debug("Archived changed: %s" % archived_changed) - for archived_dirname, date_archived in archived_changed.items(): - archived_inner_path = content_inner_dir + archived_dirname + "/content.json" - if self.contents.get(archived_inner_path, {}).get("modified", 0) < date_archived: - self.removeContent(archived_inner_path) - deleted += archived_inner_path - self.site.settings["size"], self.site.settings["size_optional"] = self.getTotalSize() - - # Check archived before - if old_content and "user_contents" in new_content and "archived_before" in new_content["user_contents"]: - old_archived_before = old_content.get("user_contents", {}).get("archived_before", 0) - new_archived_before = new_content.get("user_contents", {}).get("archived_before", 0) - if old_archived_before != new_archived_before: - self.log.debug("Archived before changed: %s -> %s" % (old_archived_before, new_archived_before)) - - # Remove downloaded archived files - num_removed_contents = 0 - for archived_inner_path in self.listModified(before=new_archived_before): - if archived_inner_path.startswith(content_inner_dir) and archived_inner_path != content_inner_path: - 
self.removeContent(archived_inner_path) - num_removed_contents += 1 - self.site.settings["size"], self.site.settings["size_optional"] = self.getTotalSize() - - # Remove archived files from download queue - num_removed_bad_files = 0 - for bad_file in list(self.site.bad_files.keys()): - if bad_file.endswith("content.json"): - del self.site.bad_files[bad_file] - num_removed_bad_files += 1 - - if num_removed_bad_files > 0: - self.site.worker_manager.removeSolvedFileTasks(mark_as_good=False) - gevent.spawn(self.site.update, since=0) - - self.log.debug("Archived removed contents: %s, removed bad files: %s" % (num_removed_contents, num_removed_bad_files)) + except Exception, err: + self.log.debug("Error deleting file %s: %s" % (file_inner_path, err)) # Load includes if load_includes and "includes" in new_content: - for relative_path, info in list(new_content["includes"].items()): + for relative_path, info in new_content["includes"].items(): include_inner_path = content_inner_dir + relative_path if self.site.storage.isFile(include_inner_path): # Content.json exists, load it include_changed, include_deleted = self.loadContent( @@ -274,138 +143,43 @@ class ContentManager(object): if include_deleted: deleted += include_deleted # Add changed files - # Save some memory - new_content["signs"] = None - if "cert_sign" in new_content: - new_content["cert_sign"] = None - - if new_content.get("files_optional"): - self.has_optional_files = True # Update the content self.contents[content_inner_path] = new_content - except Exception as err: - self.log.warning("%s parse error: %s" % (content_inner_path, Debug.formatException(err))) + except Exception, err: + self.log.error("Content.json parse error: %s" % Debug.formatException(err)) return [], [] # Content.json parse error # Add changed files to bad files if add_bad_files: for inner_path in changed: self.site.bad_files[inner_path] = self.site.bad_files.get(inner_path, 0) + 1 - for inner_path in deleted: - if inner_path in 
self.site.bad_files: - del self.site.bad_files[inner_path] - self.site.worker_manager.removeSolvedFileTasks() - if new_content.get("modified", 0) > self.site.settings.get("modified", 0): + if new_content["modified"] > self.site.settings.get("modified", 0): # Dont store modifications in the far future (more than 10 minute) self.site.settings["modified"] = min(time.time() + 60 * 10, new_content["modified"]) return changed, deleted - def removeContent(self, inner_path): - inner_dir = helper.getDirname(inner_path) - try: - content = self.contents[inner_path] - files = dict( - content.get("files", {}), - **content.get("files_optional", {}) - ) - except Exception as err: - self.log.debug("Error loading %s for removeContent: %s" % (inner_path, Debug.formatException(err))) - files = {} - files["content.json"] = True - # Deleting files that no longer in content.json - for file_relative_path in files: - file_inner_path = inner_dir + file_relative_path - try: - self.site.storage.delete(file_inner_path) - self.log.debug("Deleted file: %s" % file_inner_path) - except Exception as err: - self.log.debug("Error deleting file %s: %s" % (file_inner_path, err)) - try: - self.site.storage.deleteDir(inner_dir) - except Exception as err: - self.log.debug("Error deleting dir %s: %s" % (inner_dir, err)) - - try: - del self.contents[inner_path] - except Exception as err: - self.log.debug("Error key from contents: %s" % inner_path) - # Get total size of site # Return: 32819 (size of files in kb) def getTotalSize(self, ignore=None): - return self.contents.db.getTotalSize(self.site, ignore) - - def listModified(self, after=None, before=None): - return self.contents.db.listModified(self.site, after=after, before=before) - - def listContents(self, inner_path="content.json", user_files=False): - if inner_path not in self.contents: - return [] - back = [inner_path] - content_inner_dir = helper.getDirname(inner_path) - for relative_path in list(self.contents[inner_path].get("includes", 
{}).keys()): - include_inner_path = content_inner_dir + relative_path - back += self.listContents(include_inner_path) - return back - - # Returns if file with the given modification date is archived or not - def isArchived(self, inner_path, modified): - match = re.match(r"(.*)/(.*?)/", inner_path) - if not match: - return False - user_contents_inner_path = match.group(1) + "/content.json" - relative_directory = match.group(2) - - file_info = self.getFileInfo(user_contents_inner_path) - if file_info: - time_archived_before = file_info.get("archived_before", 0) - time_directory_archived = file_info.get("archived", {}).get(relative_directory, 0) - if modified <= time_archived_before or modified <= time_directory_archived: - return True - else: - return False - else: - return False - - def isDownloaded(self, inner_path, hash_id=None): - if not hash_id: - file_info = self.getFileInfo(inner_path) - if not file_info or "sha512" not in file_info: - return False - hash_id = self.hashfield.getHashId(file_info["sha512"]) - return hash_id in self.hashfield - - # Is modified since signing - def isModified(self, inner_path): - s = time.time() - if inner_path.endswith("content.json"): - try: - is_valid = self.verifyFile(inner_path, self.site.storage.open(inner_path), ignore_same=False) - if is_valid: - is_modified = False - else: - is_modified = True - except VerifyError: - is_modified = True - else: - try: - self.verifyFile(inner_path, self.site.storage.open(inner_path), ignore_same=False) - is_modified = False - except VerifyError: - is_modified = True - return is_modified + total_size = 0 + for inner_path, content in self.contents.iteritems(): + if inner_path == ignore: + continue + total_size += self.site.storage.getSize(inner_path) # Size of content.json + for file, info in content.get("files", {}).iteritems(): + total_size += info["size"] + return total_size # Find the file info line from self.contents # Return: { "sha512": "c29d73d...21f518", "size": 41 , 
"content_inner_path": "content.json"} - def getFileInfo(self, inner_path, new_file=False): + def getFileInfo(self, inner_path): dirs = inner_path.split("/") # Parent dirs of content.json inner_path_parts = [dirs.pop()] # Filename relative to content.json while True: content_inner_path = "%s/content.json" % "/".join(dirs) - content_inner_path = content_inner_path.strip("/") - content = self.contents.get(content_inner_path) + content = self.contents.get(content_inner_path.strip("/")) # Check in files if content and "files" in content: @@ -413,7 +187,6 @@ class ContentManager(object): if back: back["content_inner_path"] = content_inner_path back["optional"] = False - back["relative_path"] = "/".join(inner_path_parts) return back # Check in optional files @@ -422,28 +195,13 @@ class ContentManager(object): if back: back["content_inner_path"] = content_inner_path back["optional"] = True - back["relative_path"] = "/".join(inner_path_parts) return back # Return the rules if user dir if content and "user_contents" in content: back = content["user_contents"] - content_inner_path_dir = helper.getDirname(content_inner_path) - relative_content_path = inner_path[len(content_inner_path_dir):] - user_auth_address_match = re.match(r"([A-Za-z0-9]+)/.*", relative_content_path) - if user_auth_address_match: - user_auth_address = user_auth_address_match.group(1) - back["content_inner_path"] = "%s%s/content.json" % (content_inner_path_dir, user_auth_address) - else: - back["content_inner_path"] = content_inner_path_dir + "content.json" - back["optional"] = None - back["relative_path"] = "/".join(inner_path_parts) - return back - - if new_file and content: - back = {} - back["content_inner_path"] = content_inner_path - back["relative_path"] = "/".join(inner_path_parts) + # Content.json is in the users dir + back["content_inner_path"] = re.sub("(.*)/.*?$", "\\1/content.json", inner_path) back["optional"] = None return back @@ -464,12 +222,6 @@ class ContentManager(object): if not 
file_info: return False # File not found inner_path = file_info["content_inner_path"] - - if inner_path == "content.json": # Root content.json - rules = {} - rules["signers"] = self.getValidSigners(inner_path, content) - return rules - dirs = inner_path.split("/") # Parent dirs of content.json inner_path_parts = [dirs.pop()] # Filename relative to content.json inner_path_parts.insert(0, dirs.pop()) # Dont check in self dir @@ -492,40 +244,30 @@ class ContentManager(object): # Return: The rules of the file or False if not allowed def getUserContentRules(self, parent_content, inner_path, content): user_contents = parent_content["user_contents"] - - # Delivered for directory - if "inner_path" in parent_content: - parent_content_dir = helper.getDirname(parent_content["inner_path"]) - user_address = re.match(r"([A-Za-z0-9]*?)/", inner_path[len(parent_content_dir):]).group(1) - else: - user_address = re.match(r".*/([A-Za-z0-9]*?)/.*?$", inner_path).group(1) + user_address = re.match(".*/([A-Za-z0-9]*?)/.*?$", inner_path).group(1) # Delivered for directory try: if not content: content = self.site.storage.loadJson(inner_path) # Read the file if no content specified - user_urn = "%s/%s" % (content["cert_auth_type"], content["cert_user_id"]) # web/nofish@zeroid.bit - cert_user_id = content["cert_user_id"] - except Exception: # Content.json not exist - user_urn = "n-a/n-a" - cert_user_id = "n-a" + except (Exception, ): # Content.json not exist + return {"signers": [user_address], "user_address": user_address} # Return information that we know for sure - if user_address in user_contents["permissions"]: - rules = copy.copy(user_contents["permissions"].get(user_address, {})) # Default rules based on address - else: - rules = copy.copy(user_contents["permissions"].get(cert_user_id, {})) # Default rules based on username + """if not "cert_user_name" in content: # New file, unknown user + content["cert_auth_type"] = "unknown" + content["cert_user_name"] = "unknown@unknown" + """ + 
user_urn = "%s/%s" % (content["cert_auth_type"], content["cert_user_id"]) # web/nofish@zeroid.bit + rules = copy.copy(user_contents["permissions"].get(content["cert_user_id"], {})) # Default rules by username if rules is False: - banned = True - rules = {} - else: - banned = False + return False # User banned if "signers" in rules: rules["signers"] = rules["signers"][:] # Make copy of the signers - for permission_pattern, permission_rules in list(user_contents["permission_rules"].items()): # Regexp rules - if not SafeRe.match(permission_pattern, user_urn): + for permission_pattern, permission_rules in user_contents["permission_rules"].items(): # Regexp rules + if not re.match(permission_pattern, user_urn): continue # Rule is not valid for user # Update rules if its better than current recorded ones - for key, val in permission_rules.items(): + for key, val in permission_rules.iteritems(): if key not in rules: if type(val) is list: rules[key] = val[:] # Make copy @@ -540,154 +282,67 @@ class ContentManager(object): elif type(val) is list: # List, append rules[key] += val - # Accepted cert signers - rules["cert_signers"] = user_contents.get("cert_signers", {}) - rules["cert_signers_pattern"] = user_contents.get("cert_signers_pattern") - + rules["cert_signers"] = user_contents["cert_signers"] # Add valid cert signers if "signers" not in rules: rules["signers"] = [] - - if not banned: - rules["signers"].append(user_address) # Add user as valid signer + rules["signers"].append(user_address) # Add user as valid signer rules["user_address"] = user_address rules["includes_allowed"] = False return rules - # Get diffs for changed files - def getDiffs(self, inner_path, limit=30 * 1024, update_files=True): - if inner_path not in self.contents: - return {} - diffs = {} - content_inner_path_dir = helper.getDirname(inner_path) - for file_relative_path in self.contents[inner_path].get("files", {}): - file_inner_path = content_inner_path_dir + file_relative_path - if 
self.site.storage.isFile(file_inner_path + "-new"): # New version present - diffs[file_relative_path] = Diff.diff( - list(self.site.storage.open(file_inner_path)), - list(self.site.storage.open(file_inner_path + "-new")), - limit=limit - ) - if update_files: - self.site.storage.delete(file_inner_path) - self.site.storage.rename(file_inner_path + "-new", file_inner_path) - if self.site.storage.isFile(file_inner_path + "-old"): # Old version present - diffs[file_relative_path] = Diff.diff( - list(self.site.storage.open(file_inner_path + "-old")), - list(self.site.storage.open(file_inner_path)), - limit=limit - ) - if update_files: - self.site.storage.delete(file_inner_path + "-old") - return diffs - - def hashFile(self, dir_inner_path, file_relative_path, optional=False): - back = {} - file_inner_path = dir_inner_path + "/" + file_relative_path - - file_path = self.site.storage.getPath(file_inner_path) - file_size = os.path.getsize(file_path) - sha512sum = CryptHash.sha512sum(file_path) # Calculate sha512 sum of file - if optional and not self.hashfield.hasHash(sha512sum): - self.optionalDownloaded(file_inner_path, self.hashfield.getHashId(sha512sum), file_size, own=True) - - back[file_relative_path] = {"sha512": sha512sum, "size": os.path.getsize(file_path)} - return back - - def isValidRelativePath(self, relative_path): - if ".." 
in relative_path.replace("\\", "/").split("/"): - return False - elif len(relative_path) > 255: - return False - elif relative_path[0] in ("/", "\\"): # Starts with - return False - elif relative_path[-1] in (".", " "): # Ends with - return False - elif re.match(r".*(^|/)(CON|PRN|AUX|NUL|COM[1-9]|LPT[1-9]|CONOUT\$|CONIN\$)(\.|/|$)", relative_path, re.IGNORECASE): # Protected on Windows - return False - else: - return re.match(r"^[^\x00-\x1F\"*:<>?\\|]+$", relative_path) - - def sanitizePath(self, inner_path): - return re.sub("[\x00-\x1F\"*:<>?\\|]", "", inner_path) - # Hash files in directory def hashFiles(self, dir_inner_path, ignore_pattern=None, optional_pattern=None): files_node = {} files_optional_node = {} - db_inner_path = self.site.storage.getDbFile() - if dir_inner_path and not self.isValidRelativePath(dir_inner_path): + if not re.match("^[a-zA-Z0-9_\.\+-/]*$", dir_inner_path): ignored = True self.log.error("- [ERROR] Only ascii encoded directories allowed: %s" % dir_inner_path) - for file_relative_path in self.site.storage.walk(dir_inner_path, ignore_pattern): + for file_relative_path in self.site.storage.list(dir_inner_path): file_name = helper.getFilename(file_relative_path) ignored = optional = False if file_name == "content.json": ignored = True - elif file_name.startswith(".") or file_name.endswith("-old") or file_name.endswith("-new"): + elif ignore_pattern and re.match(ignore_pattern, file_relative_path): ignored = True - elif not self.isValidRelativePath(file_relative_path): + elif file_name.startswith("."): ignored = True - self.log.error("- [ERROR] Invalid filename: %s" % file_relative_path) - elif dir_inner_path == "" and db_inner_path and file_relative_path.startswith(db_inner_path): + elif not re.match("^[a-zA-Z0-9_\.\+\-/]+$", file_relative_path): ignored = True - elif optional_pattern and SafeRe.match(optional_pattern, file_relative_path): + self.log.error("- [ERROR] Only ascii encodes filenames allowed: %s" % file_relative_path) + elif 
optional_pattern and re.match(optional_pattern, file_relative_path): optional = True - if ignored: # Ignore content.json, defined regexp and files starting with . + if ignored: # Ignore content.json, definied regexp and files starting with . self.log.info("- [SKIPPED] %s" % file_relative_path) else: + file_path = self.site.storage.getPath(dir_inner_path + "/" + file_relative_path) + sha512sum = CryptHash.sha512sum(file_path) # Calculate sha512 sum of file if optional: - self.log.info("- [OPTIONAL] %s" % file_relative_path) - files_optional_node.update( - self.hashFile(dir_inner_path, file_relative_path, optional=True) - ) + self.log.info("- [OPTIONAL] %s (SHA512: %s)" % (file_relative_path, sha512sum)) + files_optional_node[file_relative_path] = {"sha512": sha512sum, "size": os.path.getsize(file_path)} + self.hashfield.appendHash(sha512sum) else: - self.log.info("- %s" % file_relative_path) - files_node.update( - self.hashFile(dir_inner_path, file_relative_path) - ) + self.log.info("- %s (SHA512: %s)" % (file_relative_path, sha512sum)) + files_node[file_relative_path] = {"sha512": sha512sum, "size": os.path.getsize(file_path)} return files_node, files_optional_node # Create and sign a content.json # Return: The new content if filewrite = False - def sign(self, inner_path="content.json", privatekey=None, filewrite=True, update_changed_files=False, extend=None, remove_missing_optional=False): - if not inner_path.endswith("content.json"): - raise SignError("Invalid file name, you can only sign content.json files") - - if inner_path in self.contents: - content = self.contents.get(inner_path) - if content and content.get("cert_sign", False) is None and self.site.storage.isFile(inner_path): - # Recover cert_sign from file - content["cert_sign"] = self.site.storage.loadJson(inner_path).get("cert_sign") - else: - content = None + def sign(self, inner_path="content.json", privatekey=None, filewrite=True, update_changed_files=False, extend=None): + content = 
self.contents.get(inner_path) if not content: # Content not exist yet, load default one self.log.info("File %s not exist yet, loading default values..." % inner_path) - - if self.site.storage.isFile(inner_path): - content = self.site.storage.loadJson(inner_path) - if "files" not in content: - content["files"] = {} - if "signs" not in content: - content["signs"] = {} - else: - content = {"files": {}, "signs": {}} # Default content.json - + content = {"files": {}, "signs": {}} # Default content.json if inner_path == "content.json": # It's the root content.json, add some more fields content["title"] = "%s - ZeroNet_" % self.site.address content["description"] = "" content["signs_required"] = 1 content["ignore"] = "" - - if extend: - # Add extend keys if not exists - for key, val in list(extend.items()): - if not content.get(key): - content[key] = val - self.log.info("Extending content.json with: %s" % key) + if extend: + content.update(extend) # Add custom fields directory = helper.getDirname(self.site.storage.getPath(inner_path)) inner_directory = helper.getDirname(inner_path) @@ -698,16 +353,11 @@ class ContentManager(object): helper.getDirname(inner_path), content.get("ignore"), content.get("optional") ) - if not remove_missing_optional: - for file_inner_path, file_details in content.get("files_optional", {}).items(): - if file_inner_path not in files_optional_node: - files_optional_node[file_inner_path] = file_details - # Find changed files files_merged = files_node.copy() files_merged.update(files_optional_node) - for file_relative_path, file_details in files_merged.items(): - old_hash = content.get("files", {}).get(file_relative_path, {}).get("sha512") + for file_relative_path, file_details in files_merged.iteritems(): + old_hash = content["files"].get(file_relative_path, {}).get("sha512") new_hash = files_merged[file_relative_path]["sha512"] if old_hash != new_hash: changed_files.append(inner_directory + file_relative_path) @@ -727,66 +377,53 @@ class 
ContentManager(object): elif "files_optional" in new_content: del new_content["files_optional"] + new_content["modified"] = time.time() # Add timestamp if inner_path == "content.json": + new_content["address"] = self.site.address new_content["zeronet_version"] = config.version new_content["signs_required"] = content.get("signs_required", 1) - new_content["address"] = self.site.address - new_content["inner_path"] = inner_path - # Verify private key from Crypt import CryptBitcoin self.log.info("Verifying private key...") privatekey_address = CryptBitcoin.privatekeyToAddress(privatekey) valid_signers = self.getValidSigners(inner_path, new_content) if privatekey_address not in valid_signers: - raise SignError( + return self.log.error( "Private key invalid! Valid signers: %s, Private key address: %s" % (valid_signers, privatekey_address) ) self.log.info("Correct %s in valid signers: %s" % (privatekey_address, valid_signers)) - signs_required = 1 if inner_path == "content.json" and privatekey_address == self.site.address: # If signing using the root key, then sign the valid signers - signs_required = new_content["signs_required"] - signers_data = "%s:%s" % (signs_required, ",".join(valid_signers)) - new_content["signers_sign"] = CryptBitcoin.sign(str(signers_data), privatekey) + new_content["signers_sign"] = CryptBitcoin.sign( + "%s:%s" % (new_content["signs_required"], ",".join(valid_signers)), privatekey + ) if not new_content["signers_sign"]: self.log.info("Old style address, signers_sign is none") self.log.info("Signing %s..." 
% inner_path) if "signs" in new_content: - # del(new_content["signs"]) # Delete old signs - old_signs_content = new_content["signs"] - del(new_content["signs"]) - else: - old_signs_content = None + del(new_content["signs"]) # Delete old signs if "sign" in new_content: del(new_content["sign"]) # Delete old sign (backward compatibility) - if signs_required > 1: - has_valid_sign = False - sign_content = json.dumps(new_content, sort_keys=True) - for signer in valid_signers: - res = CryptBitcoin.verify(sign_content,signer,old_signs_content[signer]); - print(res) - if res: - has_valid_sign = has_valid_sign or res - if has_valid_sign: - new_content["modified"] = content["modified"] - sign_content = json.dumps(new_content, sort_keys=True) - else: - new_content["modified"] = int(time.time()) # Add timestamp - sign_content = json.dumps(new_content, sort_keys=True) + sign_content = json.dumps(new_content, sort_keys=True) sign = CryptBitcoin.sign(sign_content, privatekey) # new_content["signs"] = content.get("signs", {}) # TODO: Multisig if sign: # If signing is successful (not an old address) - new_content["signs"] = old_signs_content or {} + new_content["signs"] = {} new_content["signs"][privatekey_address] = sign - self.verifyContent(inner_path, new_content) + if inner_path == "content.json": # To root content.json add old format sign for backward compatibility + oldsign_content = json.dumps(new_content, sort_keys=True) + new_content["sign"] = CryptBitcoin.signOld(oldsign_content, privatekey) + + if not self.verifyContent(inner_path, new_content): + self.log.error("Sign failed: Invalid content") + return False if filewrite: self.log.info("Saving to %s..." 
% inner_path) @@ -806,7 +443,7 @@ class ContentManager(object): valid_signers = [] if inner_path == "content.json": # Root content.json if "content.json" in self.contents and "signers" in self.contents["content.json"]: - valid_signers += self.contents["content.json"]["signers"][:] + valid_signers += self.contents["content.json"]["signers"].keys() else: rules = self.getRules(inner_path, content) if rules and "signers" in rules: @@ -818,130 +455,82 @@ class ContentManager(object): # Return: The required number of valid signs for the content.json def getSignsRequired(self, inner_path, content=None): - if not content: - return 1 - return content.get("signs_required", 1) - - def verifyCertSign(self, user_address, user_auth_type, user_name, issuer_address, sign): - from Crypt import CryptBitcoin - cert_subject = "%s#%s/%s" % (user_address, user_auth_type, user_name) - return CryptBitcoin.verify(cert_subject, issuer_address, sign) + return 1 # Todo: Multisig def verifyCert(self, inner_path, content): + from Crypt import CryptBitcoin + rules = self.getRules(inner_path, content) - - if not rules: - raise VerifyError("No rules for this file") - - if not rules.get("cert_signers") and not rules.get("cert_signers_pattern"): + if not rules.get("cert_signers"): return True # Does not need cert - if "cert_user_id" not in content: - raise VerifyError("Missing cert_user_id") - - if content["cert_user_id"].count("@") != 1: - raise VerifyError("Invalid domain in cert_user_id") - - name, domain = content["cert_user_id"].rsplit("@", 1) + name, domain = content["cert_user_id"].split("@") cert_address = rules["cert_signers"].get(domain) - if not cert_address: # Unknown Cert signer - if rules.get("cert_signers_pattern") and SafeRe.match(rules["cert_signers_pattern"], domain): - cert_address = domain - else: - raise VerifyError("Invalid cert signer: %s" % domain) - - return self.verifyCertSign(rules["user_address"], content["cert_auth_type"], name, cert_address, content["cert_sign"]) + if 
not cert_address: # Cert signer not allowed + self.log.error("Invalid cert signer: %s" % domain) + return False + return CryptBitcoin.verify( + "%s#%s/%s" % (rules["user_address"], content["cert_auth_type"], name), cert_address, content["cert_sign"] + ) # Checks if the content.json content is valid # Return: True or False def verifyContent(self, inner_path, content): - content_size = len(json.dumps(content, indent=1)) + sum([file["size"] for file in list(content["files"].values()) if file["size"] >= 0]) # Size of new content - # Calculate old content size - old_content = self.contents.get(inner_path) - if old_content: - old_content_size = len(json.dumps(old_content, indent=1)) + sum([file["size"] for file in list(old_content.get("files", {}).values())]) - old_content_size_optional = sum([file["size"] for file in list(old_content.get("files_optional", {}).values())]) - else: - old_content_size = 0 - old_content_size_optional = 0 - - # Reset site site on first content.json - if not old_content and inner_path == "content.json": - self.site.settings["size"] = 0 - - content_size_optional = sum([file["size"] for file in list(content.get("files_optional", {}).values()) if file["size"] >= 0]) - site_size = self.site.settings["size"] - old_content_size + content_size # Site size without old content plus the new - site_size_optional = self.site.settings["size_optional"] - old_content_size_optional + content_size_optional # Site size without old content plus the new + content_size = len(json.dumps(content)) + sum([file["size"] for file in content["files"].values()]) # Size of new content + content_size_optional = sum([file["size"] for file in content.get("files_optional", {}).values()]) + site_size = self.getTotalSize(ignore=inner_path) + content_size # Site size without old content + if site_size > self.site.settings.get("size", 0): + self.site.settings["size"] = site_size # Save to settings if larger site_size_limit = self.site.getSizeLimit() * 1024 * 1024 - # Check site 
address - if content.get("address") and content["address"] != self.site.address: - raise VerifyError("Wrong site address: %s != %s" % (content["address"], self.site.address)) - - # Check file inner path - if content.get("inner_path") and content["inner_path"] != inner_path: - raise VerifyError("Wrong inner_path: %s" % content["inner_path"]) - - # If our content.json file bigger than the size limit throw error - if inner_path == "content.json": - content_size_file = len(json.dumps(content, indent=1)) - if content_size_file > site_size_limit: - # Save site size to display warning - self.site.settings["size"] = site_size - task = self.site.worker_manager.tasks.findTask(inner_path) - if task: # Dont try to download from other peers - self.site.worker_manager.failTask(task) - raise VerifyError("Content too large %s B > %s B, aborting task..." % (site_size, site_size_limit)) - - # Verify valid filenames - for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()): - if not self.isValidRelativePath(file_relative_path): - raise VerifyError("Invalid relative path: %s" % file_relative_path) + # Check total site size limit + if site_size > site_size_limit: + self.log.error("%s: Site too large %s > %s, aborting task..." 
% (inner_path, site_size, site_size_limit)) + task = self.site.worker_manager.findTask(inner_path) + if task: # Dont try to download from other peers + self.site.worker_manager.failTask(task) + return False if inner_path == "content.json": - self.site.settings["size"] = site_size - self.site.settings["size_optional"] = site_size_optional return True # Root content.json is passed - else: - if self.verifyContentInclude(inner_path, content, content_size, content_size_optional): - self.site.settings["size"] = site_size - self.site.settings["size_optional"] = site_size_optional - return True - else: - raise VerifyError("Content verify error") - def verifyContentInclude(self, inner_path, content, content_size, content_size_optional): # Load include details rules = self.getRules(inner_path, content) if not rules: - raise VerifyError("No rules") + self.log.error("%s: No rules" % inner_path) + return False # Check include size limit if rules.get("max_size") is not None: # Include size limit if content_size > rules["max_size"]: - raise VerifyError("Include too large %sB > %sB" % (content_size, rules["max_size"])) + self.log.error("%s: Include too large %s > %s" % (inner_path, content_size, rules["max_size"])) + return False if rules.get("max_size_optional") is not None: # Include optional files limit if content_size_optional > rules["max_size_optional"]: - raise VerifyError("Include optional files too large %sB > %sB" % ( - content_size_optional, rules["max_size_optional"]) + self.log.error("%s: Include optional files too large %s > %s" % ( + inner_path, content_size_optional, rules["max_size_optional"]) ) + return False # Filename limit if rules.get("files_allowed"): - for file_inner_path in list(content["files"].keys()): - if not SafeRe.match(r"^%s$" % rules["files_allowed"], file_inner_path): - raise VerifyError("File not allowed: %s" % file_inner_path) + for file_inner_path in content["files"].keys(): + if not re.match("^%s$" % rules["files_allowed"], file_inner_path): + 
self.log.error("%s %s: File not allowed" % (inner_path, file_inner_path)) + return False if rules.get("files_allowed_optional"): - for file_inner_path in list(content.get("files_optional", {}).keys()): - if not SafeRe.match(r"^%s$" % rules["files_allowed_optional"], file_inner_path): - raise VerifyError("Optional file not allowed: %s" % file_inner_path) + for file_inner_path in content.get("files_optional", {}).keys(): + if not re.match("^%s$" % rules["files_allowed_optional"], file_inner_path): + self.log.error("%s %s: Optional file not allowed" % (inner_path, file_inner_path)) + return False # Check if content includes allowed if rules.get("includes_allowed") is False and content.get("includes"): - raise VerifyError("Includes not allowed") + self.log.error("%s: Includes not allowed" % inner_path) + return False # Includes not allowed return True # All good @@ -951,32 +540,22 @@ class ContentManager(object): if inner_path.endswith("content.json"): # content.json: Check using sign from Crypt import CryptBitcoin try: - if type(file) is dict: - new_content = file - else: - try: - if sys.version_info.major == 3 and sys.version_info.minor < 6: - new_content = json.loads(file.read().decode("utf8")) - else: - new_content = json.load(file) - except Exception as err: - raise VerifyError("Invalid json file: %s" % err) + new_content = json.load(file) if inner_path in self.contents: - old_content = self.contents.get(inner_path, {"modified": 0}) + old_content = self.contents.get(inner_path) # Checks if its newer the ours if old_content["modified"] == new_content["modified"] and ignore_same: # Ignore, have the same content.json return None elif old_content["modified"] > new_content["modified"]: # We have newer - raise VerifyError( - "We have newer (Our: %s, Sent: %s)" % - (old_content["modified"], new_content["modified"]) + self.log.debug( + "We have newer %s (Our: %s, Sent: %s)" % + (inner_path, old_content["modified"], new_content["modified"]) ) + 
gevent.spawn(self.site.publish, inner_path=inner_path) # Try to fix the broken peers + return False if new_content["modified"] > time.time() + 60 * 60 * 24: # Content modified in the far future (allow 1 day+) - raise VerifyError("Modify timestamp is in the far future!") - if self.isArchived(inner_path, new_content["modified"]): - if inner_path in self.site.bad_files: - del self.site.bad_files[inner_path] - raise VerifyError("This file is archived!") + self.log.error("%s modify is in the future!" % inner_path) + return False # Check sign sign = new_content.get("sign") signs = new_content.get("signs", {}) @@ -984,84 +563,116 @@ class ContentManager(object): del(new_content["sign"]) # The file signed without the sign if "signs" in new_content: del(new_content["signs"]) # The file signed without the signs - sign_content = json.dumps(new_content, sort_keys=True) # Dump the json to string to remove whitepsace - # Fix float representation error on Android - modified = new_content["modified"] - if config.fix_float_decimals and type(modified) is float and not str(modified).endswith(".0"): - modified_fixed = "{:.6f}".format(modified).strip("0.") - sign_content = sign_content.replace( - '"modified": %s' % repr(modified), - '"modified": %s' % modified_fixed - ) + if not self.verifyContent(inner_path, new_content): + return False # Content not valid (files too large, invalid files) if signs: # New style signing valid_signers = self.getValidSigners(inner_path, new_content) signs_required = self.getSignsRequired(inner_path, new_content) if inner_path == "content.json" and len(valid_signers) > 1: # Check signers_sign on root content.json - signers_data = "%s:%s" % (signs_required, ",".join(valid_signers)) - if not CryptBitcoin.verify(signers_data, self.site.address, new_content["signers_sign"]): - raise VerifyError("Invalid signers_sign!") + if not CryptBitcoin.verify( + "%s:%s" % (signs_required, ",".join(valid_signers)), self.site.address, new_content["signers_sign"] + ): + 
self.log.error("%s invalid signers_sign!" % inner_path) + return False if inner_path != "content.json" and not self.verifyCert(inner_path, new_content): # Check if cert valid - raise VerifyError("Invalid cert!") + self.log.error("%s invalid cert!" % inner_path) + return False - valid_signs = [] + valid_signs = 0 for address in valid_signers: if address in signs: - result = CryptBitcoin.verify(sign_content, address, signs[address]) - if result: - valid_signs.append(address) - if len(valid_signs) >= signs_required: + valid_signs += CryptBitcoin.verify(sign_content, address, signs[address]) + if valid_signs >= signs_required: break # Break if we has enough signs - if len(valid_signs) < signs_required: - raise VerifyError("Valid signs: %s/%s, Valid Signers : %s" % (len(valid_signs), signs_required, valid_signs)) - else: - return self.verifyContent(inner_path, new_content) + self.log.debug("%s: Valid signs: %s/%s" % (inner_path, valid_signs, signs_required)) + return valid_signs >= signs_required else: # Old style signing - raise VerifyError("Invalid old-style sign") + return CryptBitcoin.verify(sign_content, self.site.address, sign) - except Exception as err: - self.log.warning("%s: verify sign error: %s" % (inner_path, Debug.formatException(err))) - raise err + except Exception, err: + self.log.error("Verify sign error: %s" % Debug.formatException(err)) + return False else: # Check using sha512 hash file_info = self.getFileInfo(inner_path) if file_info: - if CryptHash.sha512sum(file) != file_info.get("sha512", ""): - raise VerifyError("Invalid hash") - + if "sha512" in file_info: + hash_valid = CryptHash.sha512sum(file) == file_info["sha512"] + elif "sha1" in file_info: # Backward compatibility + hash_valid = CryptHash.sha1sum(file) == file_info["sha1"] + else: + hash_valid = False if file_info.get("size", 0) != file.tell(): - raise VerifyError( - "File size does not match %s <> %s" % - (inner_path, file.tell(), file_info.get("size", 0)) + self.log.error( + "%s file 
size does not match %s <> %s, Hash: %s" % + (inner_path, file.tell(), file_info.get("size", 0), hash_valid) ) - - return True + return False + return hash_valid else: # File not in content.json - raise VerifyError("File not in content.json") + self.log.error("File not in content.json: %s" % inner_path) + return False - def optionalDelete(self, inner_path): - self.site.storage.delete(inner_path) + def addOptionalFile(self, inner_path): + info = self.getFileInfo(inner_path) + if info and info["optional"]: + self.log.debug("Downloaded optional file, adding to hashfield: %s" % inner_path) + self.hashfield.appendHash(info["sha512"]) - def optionalDownloaded(self, inner_path, hash_id, size=None, own=False): - if size is None: - size = self.site.storage.getSize(inner_path) - done = self.hashfield.appendHashId(hash_id) - self.site.settings["optional_downloaded"] += size - return done +if __name__ == "__main__": + def testSign(): + global config + from Site import Site + site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH") + content_manager = ContentManager(site) + content_manager.sign( + "data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json", "5JCGE6UUruhfmAfcZ2GYjvrswkaiq7uLo6Gmtf2ep2Jh2jtNzWR" + ) - def optionalRemoved(self, inner_path, hash_id, size=None): - if size is None: - size = self.site.storage.getSize(inner_path) - done = self.hashfield.removeHashId(hash_id) + def testVerify(): + from Site import Site + site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH") - self.site.settings["optional_downloaded"] -= size - return done + content_manager = ContentManager(site) + print "Loaded contents:", content_manager.contents.keys() - def optionalRenamed(self, inner_path_old, inner_path_new): - return True + file = open(site.storage.getPath("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json")) + print "content.json valid:", content_manager.verifyFile( + "data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json", file, ignore_same=False + ) + + file = 
open(site.storage.getPath("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/messages.json")) + print "messages.json valid:", content_manager.verifyFile( + "data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/messages.json", file, ignore_same=False + ) + + def testInfo(): + from Site import Site + site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH") + + content_manager = ContentManager(site) + print content_manager.contents.keys() + + print content_manager.getFileInfo("index.html") + print content_manager.getIncludeInfo("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json") + print content_manager.getValidSigners("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json") + print content_manager.getValidSigners("data/users/content.json") + print content_manager.getValidSigners("content.json") + + import sys + import logging + os.chdir("../..") + sys.path.insert(0, os.path.abspath(".")) + sys.path.insert(0, os.path.abspath("src")) + logging.basicConfig(level=logging.DEBUG) + + # testSign() + testVerify() + # testInfo() diff --git a/src/Content/__init__.py b/src/Content/__init__.py index fbbd39f4..fab39f93 100644 --- a/src/Content/__init__.py +++ b/src/Content/__init__.py @@ -1 +1 @@ -from .ContentManager import ContentManager \ No newline at end of file +from ContentManager import ContentManager \ No newline at end of file diff --git a/src/Crypt/Crypt.py b/src/Crypt/Crypt.py deleted file mode 100644 index 7d7d3659..00000000 --- a/src/Crypt/Crypt.py +++ /dev/null @@ -1,4 +0,0 @@ -from Config import config -from util import ThreadPool - -thread_pool_crypt = ThreadPool.ThreadPool(config.threads_crypt) \ No newline at end of file diff --git a/src/Crypt/CryptBitcoin.py b/src/Crypt/CryptBitcoin.py index 68b2caa2..15d76d5f 100644 --- a/src/Crypt/CryptBitcoin.py +++ b/src/Crypt/CryptBitcoin.py @@ -1,101 +1,75 @@ import logging -import base64 -import binascii -import time -import hashlib -from util.Electrum import dbl_format +from lib.BitcoinECC import BitcoinECC +from 
lib.pybitcointools import bitcoin as btctools from Config import config -import util.OpensslFindPatch - -lib_verify_best = "sslcrypto" - -from lib import sslcrypto -sslcurve_native = sslcrypto.ecc.get_curve("secp256k1") -sslcurve_fallback = sslcrypto.fallback.ecc.get_curve("secp256k1") -sslcurve = sslcurve_native - -def loadLib(lib_name, silent=False): - global sslcurve, libsecp256k1message, lib_verify_best - if lib_name == "libsecp256k1": - s = time.time() - from lib import libsecp256k1message - import coincurve - lib_verify_best = "libsecp256k1" - if not silent: - logging.info( - "Libsecpk256k1 loaded: %s in %.3fs" % - (type(coincurve._libsecp256k1.lib).__name__, time.time() - s) - ) - elif lib_name == "sslcrypto": - sslcurve = sslcurve_native - if sslcurve_native == sslcurve_fallback: - logging.warning("SSLCurve fallback loaded instead of native") - elif lib_name == "sslcrypto_fallback": - sslcurve = sslcurve_fallback - +# Try to load openssl try: - if not config.use_libsecp256k1: + if not config.use_openssl: raise Exception("Disabled by config") - loadLib("libsecp256k1") - lib_verify_best = "libsecp256k1" -except Exception as err: - logging.info("Libsecp256k1 load failed: %s" % err) + from lib.opensslVerify import opensslVerify + logging.info("OpenSSL loaded, version: %s" % opensslVerify.openssl_version) +except Exception, err: + logging.info("OpenSSL load failed: %s, falling back to slow bitcoin verify" % err) + opensslVerify = None -def newPrivatekey(): # Return new private key - return sslcurve.private_to_wif(sslcurve.new_private_key()).decode() +def newPrivatekey(uncompressed=True): # Return new private key + privatekey = btctools.encode_privkey(btctools.random_key(), "wif") + return privatekey def newSeed(): - return binascii.hexlify(sslcurve.new_private_key()).decode() + return btctools.random_key() def hdPrivatekey(seed, child): - # Too large child id could cause problems - privatekey_bin = sslcurve.derive_child(seed.encode(), child % 100000000) - return 
sslcurve.private_to_wif(privatekey_bin).decode() + masterkey = btctools.bip32_master_key(seed) + childkey = btctools.bip32_ckd(masterkey, child % 100000000) # Too large child id could cause problems + key = btctools.bip32_extract_key(childkey) + return btctools.encode_privkey(key, "wif") def privatekeyToAddress(privatekey): # Return address from private key - try: - if len(privatekey) == 64: - privatekey_bin = bytes.fromhex(privatekey) - else: - privatekey_bin = sslcurve.wif_to_private(privatekey.encode()) - return sslcurve.private_to_address(privatekey_bin).decode() - except Exception: # Invalid privatekey - return False + if privatekey.startswith("23") and len(privatekey) > 52: # Backward compatibility to broken lib + bitcoin = BitcoinECC.Bitcoin() + bitcoin.BitcoinAddressFromPrivate(privatekey) + return bitcoin.BitcoinAddresFromPublicKey() + else: + try: + return btctools.privkey_to_address(privatekey) + except Exception: # Invalid privatekey + return False def sign(data, privatekey): # Return sign to data using private key if privatekey.startswith("23") and len(privatekey) > 52: return None # Old style private key not supported - return base64.b64encode(sslcurve.sign( - data.encode(), - sslcurve.wif_to_private(privatekey.encode()), - recoverable=True, - hash=dbl_format - )).decode() + sign = btctools.ecdsa_sign(data, privatekey) + return sign -def verify(data, valid_address, sign, lib_verify=None): # Verify data using address and sign - if not lib_verify: - lib_verify = lib_verify_best +def signOld(data, privatekey): # Return sign to data using private key (backward compatible old style) + bitcoin = BitcoinECC.Bitcoin() + bitcoin.BitcoinAddressFromPrivate(privatekey) + sign = bitcoin.SignECDSA(data) + return sign - if not sign: - return False - if lib_verify == "libsecp256k1": - sign_address = libsecp256k1message.recover_address(data.encode("utf8"), sign).decode("utf8") - elif lib_verify in ("sslcrypto", "sslcrypto_fallback"): - publickey = 
sslcurve.recover(base64.b64decode(sign), data.encode(), hash=dbl_format) - sign_address = sslcurve.public_to_address(publickey).decode() - else: - raise Exception("No library enabled for signature verification") +def verify(data, address, sign): # Verify data using address and sign + if hasattr(sign, "endswith"): + if opensslVerify: # Use the faster method if avalible + pub = opensslVerify.getMessagePubkey(data, sign) + sign_address = btctools.pubtoaddr(pub) + else: # Use pure-python + pub = btctools.ecdsa_recover(data, sign) + sign_address = btctools.pubtoaddr(pub) - if type(valid_address) is list: # Any address in the list - return sign_address in valid_address - else: # One possible address - return sign_address == valid_address + if type(address) is list: # Any address in the list + return sign_address in address + else: # One possible address + return sign_address == address + else: # Backward compatible old style + bitcoin = BitcoinECC.Bitcoin() + return bitcoin.VerifyMessageFromBitcoinAddress(address, data, sign) diff --git a/src/Crypt/CryptConnection.py b/src/Crypt/CryptConnection.py index c0903e84..fb2c0920 100644 --- a/src/Crypt/CryptConnection.py +++ b/src/Crypt/CryptConnection.py @@ -2,81 +2,23 @@ import sys import logging import os import ssl -import hashlib -import random from Config import config +from util import SslPatch from util import helper class CryptConnectionManager: def __init__(self): - if config.openssl_bin_file: - self.openssl_bin = config.openssl_bin_file - elif sys.platform.startswith("win"): - self.openssl_bin = "tools\\openssl\\openssl.exe" - elif config.dist_type.startswith("bundle_linux"): - self.openssl_bin = "../runtime/bin/openssl" + # OpenSSL params + if sys.platform.startswith("win"): + self.openssl_bin = "src\\lib\\opensslVerify\\openssl.exe" else: self.openssl_bin = "openssl" - - self.context_client = None - self.context_server = None - - self.openssl_conf_template = "src/lib/openssl/openssl.cnf" - self.openssl_conf = 
config.data_dir + "/openssl.cnf" - - self.openssl_env = { - "OPENSSL_CONF": self.openssl_conf, - "RANDFILE": config.data_dir + "/openssl-rand.tmp" - } + self.openssl_env = {"OPENSSL_CONF": "src/lib/opensslVerify/openssl.cnf"} self.crypt_supported = [] # Supported cryptos - self.cacert_pem = config.data_dir + "/cacert-rsa.pem" - self.cakey_pem = config.data_dir + "/cakey-rsa.pem" - self.cert_pem = config.data_dir + "/cert-rsa.pem" - self.cert_csr = config.data_dir + "/cert-rsa.csr" - self.key_pem = config.data_dir + "/key-rsa.pem" - - self.log = logging.getLogger("CryptConnectionManager") - self.log.debug("Version: %s" % ssl.OPENSSL_VERSION) - - self.fakedomains = [ - "yahoo.com", "amazon.com", "live.com", "microsoft.com", "mail.ru", "csdn.net", "bing.com", - "amazon.co.jp", "office.com", "imdb.com", "msn.com", "samsung.com", "huawei.com", "ztedevices.com", - "godaddy.com", "w3.org", "gravatar.com", "creativecommons.org", "hatena.ne.jp", - "adobe.com", "opera.com", "apache.org", "rambler.ru", "one.com", "nationalgeographic.com", - "networksolutions.com", "php.net", "python.org", "phoca.cz", "debian.org", "ubuntu.com", - "nazwa.pl", "symantec.com" - ] - - def createSslContexts(self): - if self.context_server and self.context_client: - return False - ciphers = "ECDHE-RSA-CHACHA20-POLY1305:ECDHE-RSA-AES128-GCM-SHA256:AES128-SHA256:AES256-SHA:" - ciphers += "!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK" - - if hasattr(ssl, "PROTOCOL_TLS"): - protocol = ssl.PROTOCOL_TLS - else: - protocol = ssl.PROTOCOL_TLSv1_2 - self.context_client = ssl.SSLContext(protocol) - self.context_client.check_hostname = False - self.context_client.verify_mode = ssl.CERT_NONE - - self.context_server = ssl.SSLContext(protocol) - self.context_server.load_cert_chain(self.cert_pem, self.key_pem) - - for ctx in (self.context_client, self.context_server): - ctx.set_ciphers(ciphers) - ctx.options |= ssl.OP_NO_COMPRESSION - try: - ctx.set_alpn_protocols(["h2", "http/1.1"]) - 
ctx.set_npn_protocols(["h2", "http/1.1"]) - except Exception: - pass - # Select crypt that supported by both sides # Return: Name of the crypto def selectCrypt(self, client_supported): @@ -87,24 +29,21 @@ class CryptConnectionManager: # Wrap socket for crypt # Return: wrapped socket - def wrapSocket(self, sock, crypt, server=False, cert_pin=None): + def wrapSocket(self, sock, crypt, server=False): if crypt == "tls-rsa": + ciphers = "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:AES128-GCM-SHA256:AES128-SHA256:HIGH:" + ciphers += "!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK" if server: - sock_wrapped = self.context_server.wrap_socket(sock, server_side=True) + return ssl.wrap_socket( + sock, server_side=server, keyfile='%s/key-rsa.pem' % config.data_dir, + certfile='%s/cert-rsa.pem' % config.data_dir, ciphers=ciphers) else: - sock_wrapped = self.context_client.wrap_socket(sock, server_hostname=random.choice(self.fakedomains)) - if cert_pin: - cert_hash = hashlib.sha256(sock_wrapped.getpeercert(True)).hexdigest() - if cert_hash != cert_pin: - raise Exception("Socket certificate does not match (%s != %s)" % (cert_hash, cert_pin)) - return sock_wrapped + return ssl.wrap_socket(sock, ciphers=ciphers) else: return sock def removeCerts(self): - if config.keep_ssl_cert: - return False - for file_name in ["cert-rsa.pem", "key-rsa.pem", "cacert-rsa.pem", "cakey-rsa.pem", "cacert-rsa.srl", "cert-rsa.csr", "openssl-rand.tmp"]: + for file_name in ["cert-rsa.pem", "key-rsa.pem"]: file_path = "%s/%s" % (config.data_dir, file_name) if os.path.isfile(file_path): os.unlink(file_path) @@ -114,108 +53,69 @@ class CryptConnectionManager: if config.disable_encryption: return False - if self.createSslRsaCert() and "tls-rsa" not in self.crypt_supported: + if self.createSslRsaCert(): self.crypt_supported.append("tls-rsa") # Try to create RSA server cert + sign for connection encryption # Return: True on success def createSslRsaCert(self): - casubjects = [ - 
"/C=US/O=Amazon/OU=Server CA 1B/CN=Amazon", - "/C=US/O=Let's Encrypt/CN=Let's Encrypt Authority X3", - "/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert SHA2 High Assurance Server CA", - "/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO RSA Domain Validation Secure Server CA" - ] - self.openssl_env['CN'] = random.choice(self.fakedomains) - environ = os.environ - environ['OPENSSL_CONF'] = self.openssl_env['OPENSSL_CONF'] - environ['RANDFILE'] = self.openssl_env['RANDFILE'] - environ['CN'] = self.openssl_env['CN'] - - if os.path.isfile(self.cert_pem) and os.path.isfile(self.key_pem): - self.createSslContexts() - return True # Files already exits - import subprocess - # Replace variables in config template - conf_template = open(self.openssl_conf_template).read() - conf_template = conf_template.replace("$ENV::CN", self.openssl_env['CN']) - open(self.openssl_conf, "w").write(conf_template) + if os.path.isfile("%s/cert-rsa.pem" % config.data_dir) and os.path.isfile("%s/key-rsa.pem" % config.data_dir): + return True # Files already exits - # Generate CAcert and CAkey - cmd_params = helper.shellquote( - self.openssl_bin, - self.openssl_conf, - random.choice(casubjects), - self.cakey_pem, - self.cacert_pem - ) - cmd = "%s req -new -newkey rsa:2048 -days 3650 -nodes -x509 -config %s -subj %s -keyout %s -out %s -batch" % cmd_params - self.log.debug("Generating RSA CAcert and CAkey PEM files...") - self.log.debug("Running: %s" % cmd) proc = subprocess.Popen( - cmd, shell=True, stderr=subprocess.STDOUT, - stdout=subprocess.PIPE, env=environ + "%s req -x509 -newkey rsa:2048 -sha256 -batch -keyout %s -out %s -nodes -config %s" % helper.shellquote( + self.openssl_bin, + config.data_dir+"/key-rsa.pem", + config.data_dir+"/cert-rsa.pem", + self.openssl_env["OPENSSL_CONF"] + ), + shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=self.openssl_env ) - back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "") + back = 
proc.stdout.read().strip() proc.wait() + logging.debug("Generating RSA cert and key PEM files...%s" % back) - if not (os.path.isfile(self.cacert_pem) and os.path.isfile(self.cakey_pem)): - self.log.error("RSA ECC SSL CAcert generation failed, CAcert or CAkey files not exist. (%s)" % back) - return False - else: - self.log.debug("Result: %s" % back) - - # Generate certificate key and signing request - cmd_params = helper.shellquote( - self.openssl_bin, - self.key_pem, - self.cert_csr, - "/CN=" + self.openssl_env['CN'], - self.openssl_conf, - ) - cmd = "%s req -new -newkey rsa:2048 -keyout %s -out %s -subj %s -sha256 -nodes -batch -config %s" % cmd_params - self.log.debug("Generating certificate key and signing request...") - proc = subprocess.Popen( - cmd, shell=True, stderr=subprocess.STDOUT, - stdout=subprocess.PIPE, env=environ - ) - back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "") - proc.wait() - self.log.debug("Running: %s\n%s" % (cmd, back)) - - # Sign request and generate certificate - cmd_params = helper.shellquote( - self.openssl_bin, - self.cert_csr, - self.cacert_pem, - self.cakey_pem, - self.cert_pem, - self.openssl_conf - ) - cmd = "%s x509 -req -in %s -CA %s -CAkey %s -set_serial 01 -out %s -days 730 -sha256 -extensions x509_ext -extfile %s" % cmd_params - self.log.debug("Generating RSA cert...") - proc = subprocess.Popen( - cmd, shell=True, stderr=subprocess.STDOUT, - stdout=subprocess.PIPE, env=environ - ) - back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "") - proc.wait() - self.log.debug("Running: %s\n%s" % (cmd, back)) - - if os.path.isfile(self.cert_pem) and os.path.isfile(self.key_pem): - self.createSslContexts() - - # Remove no longer necessary files - os.unlink(self.openssl_conf) - os.unlink(self.cacert_pem) - os.unlink(self.cakey_pem) - os.unlink(self.cert_csr) - + if os.path.isfile("%s/cert-rsa.pem" % config.data_dir) and os.path.isfile("%s/key-rsa.pem" % config.data_dir): return True 
else: - self.log.error("RSA ECC SSL cert generation failed, cert or key files not exist.") + logging.error("RSA ECC SSL cert generation failed, cert or key files not exist.") + return False + # Not used yet: Missing on some platform + """def createSslEccCert(self): + return False + import subprocess + + # Create ECC privatekey + proc = subprocess.Popen( + "%s ecparam -name prime256v1 -genkey -out %s/key-ecc.pem" % (self.openssl_bin, config.data_dir), + shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=self.openssl_env + ) + back = proc.stdout.read().strip() + proc.wait() + self.log.debug("Generating ECC privatekey PEM file...%s" % back) + + # Create ECC cert + proc = subprocess.Popen( + "%s req -new -key %s -x509 -nodes -out %s -config %s" % helper.shellquote( + self.openssl_bin, + config.data_dir+"/key-ecc.pem", + config.data_dir+"/cert-ecc.pem", + self.openssl_env["OPENSSL_CONF"] + ), + shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=self.openssl_env + ) + back = proc.stdout.read().strip() + proc.wait() + self.log.debug("Generating ECC cert PEM file...%s" % back) + + if os.path.isfile("%s/cert-ecc.pem" % config.data_dir) and os.path.isfile("%s/key-ecc.pem" % config.data_dir): + return True + else: + self.logging.error("ECC SSL cert generation failed, cert or key files not exits.") + return False + """ manager = CryptConnectionManager() diff --git a/src/Crypt/CryptHash.py b/src/Crypt/CryptHash.py index f5901fb8..e25c06c6 100644 --- a/src/Crypt/CryptHash.py +++ b/src/Crypt/CryptHash.py @@ -3,54 +3,46 @@ import os import base64 -def sha512sum(file, blocksize=65536, format="hexdigest"): - if type(file) is str: # Filename specified +def sha1sum(file, blocksize=65536): + if hasattr(file, "endswith"): # Its a string open it file = open(file, "rb") - hash = hashlib.sha512() - for block in iter(lambda: file.read(blocksize), b""): - hash.update(block) - - # Truncate to 256bits is good enough - if format == "hexdigest": - return 
hash.hexdigest()[0:64] - else: - return hash.digest()[0:32] - - -def sha256sum(file, blocksize=65536): - if type(file) is str: # Filename specified - file = open(file, "rb") - hash = hashlib.sha256() - for block in iter(lambda: file.read(blocksize), b""): + hash = hashlib.sha1() + for block in iter(lambda: file.read(blocksize), ""): hash.update(block) return hash.hexdigest() +def sha512sum(file, blocksize=65536): + if hasattr(file, "endswith"): # Its a string open it + file = open(file, "rb") + hash = hashlib.sha512() + for block in iter(lambda: file.read(blocksize), ""): + hash.update(block) + return hash.hexdigest()[0:64] # Truncate to 256bits is good enough + + def random(length=64, encoding="hex"): if encoding == "base64": # Characters: A-Za-z0-9 hash = hashlib.sha512(os.urandom(256)).digest() - return base64.b64encode(hash).decode("ascii").replace("+", "").replace("/", "").replace("=", "")[0:length] + return base64.standard_b64encode(hash).replace("+", "").replace("/", "").replace("=", "")[0:length] else: # Characters: a-f0-9 (faster) return hashlib.sha512(os.urandom(256)).hexdigest()[0:length] -# Sha512 truncated to 256bits -class Sha512t: - def __init__(self, data): - if data: - self.sha512 = hashlib.sha512(data) - else: - self.sha512 = hashlib.sha512() - def hexdigest(self): - return self.sha512.hexdigest()[0:64] +if __name__ == "__main__": + import cStringIO as StringIO + a = StringIO.StringIO() + a.write("hello!") + a.seek(0) + print hashlib.sha1("hello!").hexdigest() + print sha1sum(a) - def digest(self): - return self.sha512.digest()[0:32] + import time + s = time.time() + print sha1sum(open("F:\\Temp\\bigfile")), + print time.time() - s - def update(self, data): - return self.sha512.update(data) - - -def sha512t(data=None): - return Sha512t(data) + s = time.time() + print sha512sum(open("F:\\Temp\\bigfile")), + print time.time() - s diff --git a/src/Crypt/CryptTor.py b/src/Crypt/CryptTor.py deleted file mode 100644 index 78ba6fc2..00000000 --- 
a/src/Crypt/CryptTor.py +++ /dev/null @@ -1,85 +0,0 @@ -import base64 -import hashlib - -def sign(data, privatekey): - import rsa - from rsa import pkcs1 - from lib import Ed25519 - - ## Onion Service V3 - if len(privatekey) == 88: - prv_key = base64.b64decode(privatekey) - pub_key = Ed25519.publickey_unsafe(prv_key) - sign = Ed25519.signature_unsafe(data, prv_key, pub_key) - - return sign - - ## Onion Service V2 - if "BEGIN RSA PRIVATE KEY" not in privatekey: - privatekey = "-----BEGIN RSA PRIVATE KEY-----\n%s\n-----END RSA PRIVATE KEY-----" % privatekey - - priv = rsa.PrivateKey.load_pkcs1(privatekey) - sign = rsa.pkcs1.sign(data, priv, 'SHA-256') - return sign - -def verify(data, publickey, sign): - import rsa - from rsa import pkcs1 - from lib import Ed25519 - - ## Onion Service V3 - if len(publickey) == 32: - - try: - valid = Ed25519.checkvalid(sign, data, publickey) - valid = 'SHA-256' - - except Exception as err: - print(err) - valid = False - - return valid - - ## Onion Service V2 - pub = rsa.PublicKey.load_pkcs1(publickey, format="DER") - - try: - valid = rsa.pkcs1.verify(data, sign, pub) - - except pkcs1.VerificationError: - valid = False - - return valid - -def privatekeyToPublickey(privatekey): - import rsa - from rsa import pkcs1 - from lib import Ed25519 - - ## Onion Service V3 - if len(privatekey) == 88: - prv_key = base64.b64decode(privatekey) - pub_key = Ed25519.publickey_unsafe(prv_key) - - return pub_key - - ## Onion Service V2 - if "BEGIN RSA PRIVATE KEY" not in privatekey: - privatekey = "-----BEGIN RSA PRIVATE KEY-----\n%s\n-----END RSA PRIVATE KEY-----" % privatekey - - priv = rsa.PrivateKey.load_pkcs1(privatekey) - pub = rsa.PublicKey(priv.n, priv.e) - - return pub.save_pkcs1("DER") - -def publickeyToOnion(publickey): - from lib import Ed25519 - - ## Onion Service V3 - if len(publickey) == 32: - addr = Ed25519.publickey_to_onionaddress(publickey)[:-6] - - return addr - - ## Onion Service V2 - return 
base64.b32encode(hashlib.sha1(publickey).digest()[:10]).lower().decode("ascii") diff --git a/src/Db/Db.py b/src/Db/Db.py index d1d9ce15..591ee206 100644 --- a/src/Db/Db.py +++ b/src/Db/Db.py @@ -4,24 +4,10 @@ import time import logging import re import os -import atexit -import threading -import sys -import weakref -import errno - import gevent -from Debug import Debug -from .DbCursor import DbCursor -from util import SafeRe -from util import helper -from util import ThreadPool -from Config import config +from DbCursor import DbCursor -thread_pool_db = ThreadPool.ThreadPool(config.threads_db) - -next_db_id = 0 opened_dbs = [] @@ -30,242 +16,90 @@ def dbCleanup(): while 1: time.sleep(60 * 5) for db in opened_dbs[:]: - idle = time.time() - db.last_query_time - if idle > 60 * 5 and db.close_idle: - db.close("Cleanup") - - -def dbCommitCheck(): - while 1: - time.sleep(5) - for db in opened_dbs[:]: - if not db.need_commit: - continue - - success = db.commit("Interval") - if success: - db.need_commit = False - time.sleep(0.1) - - -def dbCloseAll(): - for db in opened_dbs[:]: - db.close("Close all") - + if time.time() - db.last_query_time > 60 * 3: + db.close() gevent.spawn(dbCleanup) -gevent.spawn(dbCommitCheck) -atexit.register(dbCloseAll) -class DbTableError(Exception): - def __init__(self, message, table): - super().__init__(message) - self.table = table +class Db: - -class Db(object): - - def __init__(self, schema, db_path, close_idle=False): - global next_db_id + def __init__(self, schema, db_path): self.db_path = db_path self.db_dir = os.path.dirname(db_path) + "/" self.schema = schema self.schema["version"] = self.schema.get("version", 1) self.conn = None self.cur = None - self.cursors = weakref.WeakSet() - self.id = next_db_id - next_db_id += 1 - self.progress_sleeping = False - self.commiting = False - self.log = logging.getLogger("Db#%s:%s" % (self.id, schema["db_name"])) + self.log = logging.getLogger("Db:%s" % schema["db_name"]) self.table_names = None 
self.collect_stats = False - self.foreign_keys = False - self.need_commit = False self.query_stats = {} self.db_keyvalues = {} - self.delayed_queue = [] - self.delayed_queue_thread = None - self.close_idle = close_idle self.last_query_time = time.time() - self.last_sleep_time = time.time() - self.num_execute_since_sleep = 0 - self.lock = ThreadPool.Lock() - self.connect_lock = ThreadPool.Lock() def __repr__(self): - return "" % (id(self), self.db_path, self.close_idle) + return "" % self.db_path def connect(self): - self.connect_lock.acquire(True) - try: - if self.conn: - self.log.debug("Already connected, connection ignored") - return + if self not in opened_dbs: + opened_dbs.append(self) - if self not in opened_dbs: - opened_dbs.append(self) - s = time.time() - try: # Directory not exist yet - os.makedirs(self.db_dir) - self.log.debug("Created Db path: %s" % self.db_dir) - except OSError as err: - if err.errno != errno.EEXIST: - raise err - if not os.path.isfile(self.db_path): - self.log.debug("Db file not exist yet: %s" % self.db_path) - self.conn = sqlite3.connect(self.db_path, isolation_level="DEFERRED", check_same_thread=False) - self.conn.row_factory = sqlite3.Row - self.conn.set_progress_handler(self.progress, 5000000) - self.conn.execute('PRAGMA journal_mode=WAL') - if self.foreign_keys: - self.conn.execute("PRAGMA foreign_keys = ON") - self.cur = self.getCursor() - - self.log.debug( - "Connected to %s in %.3fs (opened: %s, sqlite version: %s)..." 
% - (self.db_path, time.time() - s, len(opened_dbs), sqlite3.version) - ) - self.log.debug("Connect by thread: %s" % threading.current_thread().ident) - self.log.debug("Connect called by %s" % Debug.formatStack()) - finally: - self.connect_lock.release() - - def getConn(self): - if not self.conn: - self.connect() - return self.conn - - def progress(self, *args, **kwargs): - self.progress_sleeping = True - time.sleep(0.001) - self.progress_sleeping = False + self.log.debug("Connecting to %s (sqlite version: %s)..." % (self.db_path, sqlite3.version)) + if not os.path.isdir(self.db_dir): # Directory not exist yet + os.makedirs(self.db_dir) + self.log.debug("Created Db path: %s" % self.db_dir) + if not os.path.isfile(self.db_path): + self.log.debug("Db file not exist yet: %s" % self.db_path) + self.conn = sqlite3.connect(self.db_path) + self.conn.row_factory = sqlite3.Row + self.conn.isolation_level = None + self.cur = self.getCursor() + # We need more speed then security + self.cur.execute("PRAGMA journal_mode = WAL") + self.cur.execute("PRAGMA journal_mode = MEMORY") + self.cur.execute("PRAGMA synchronous = OFF") # Execute query using dbcursor def execute(self, query, params=None): + self.last_query_time = time.time() if not self.conn: self.connect() return self.cur.execute(query, params) - @thread_pool_db.wrap - def commit(self, reason="Unknown"): - if self.progress_sleeping: - self.log.debug("Commit ignored: Progress sleeping") - return False - - if not self.conn: - self.log.debug("Commit ignored: No connection") - return False - - if self.commiting: - self.log.debug("Commit ignored: Already commiting") - return False - - try: - s = time.time() - self.commiting = True - self.conn.commit() - self.log.debug("Commited in %.3fs (reason: %s)" % (time.time() - s, reason)) - return True - except Exception as err: - if "SQL statements in progress" in str(err): - self.log.warning("Commit delayed: %s (reason: %s)" % (Debug.formatException(err), reason)) - else: - 
self.log.error("Commit error: %s (reason: %s)" % (Debug.formatException(err), reason)) - return False - finally: - self.commiting = False - - def insertOrUpdate(self, *args, **kwargs): - if not self.conn: - self.connect() - return self.cur.insertOrUpdate(*args, **kwargs) - - def executeDelayed(self, *args, **kwargs): - if not self.delayed_queue_thread: - self.delayed_queue_thread = gevent.spawn_later(1, self.processDelayed) - self.delayed_queue.append(("execute", (args, kwargs))) - - def insertOrUpdateDelayed(self, *args, **kwargs): - if not self.delayed_queue: - gevent.spawn_later(1, self.processDelayed) - self.delayed_queue.append(("insertOrUpdate", (args, kwargs))) - - def processDelayed(self): - if not self.delayed_queue: - self.log.debug("processDelayed aborted") - return - if not self.conn: - self.connect() - - s = time.time() - cur = self.getCursor() - for command, params in self.delayed_queue: - if command == "insertOrUpdate": - cur.insertOrUpdate(*params[0], **params[1]) - else: - cur.execute(*params[0], **params[1]) - - if len(self.delayed_queue) > 10: - self.log.debug("Processed %s delayed queue in %.3fs" % (len(self.delayed_queue), time.time() - s)) - self.delayed_queue = [] - self.delayed_queue_thread = None - - def close(self, reason="Unknown"): - if not self.conn: - return False - self.connect_lock.acquire() - s = time.time() - if self.delayed_queue: - self.processDelayed() + def close(self): + self.log.debug("Closing, opened: %s" % opened_dbs) if self in opened_dbs: opened_dbs.remove(self) - self.need_commit = False - self.commit("Closing: %s" % reason) - self.log.debug("Close called by %s" % Debug.formatStack()) - for i in range(5): - if len(self.cursors) == 0: - break - self.log.debug("Pending cursors: %s" % len(self.cursors)) - time.sleep(0.1 * i) - if len(self.cursors): - self.log.debug("Killing cursors: %s" % len(self.cursors)) - self.conn.interrupt() - if self.cur: self.cur.close() if self.conn: - ThreadPool.main_loop.call(self.conn.close) + 
self.conn.close() self.conn = None self.cur = None - self.log.debug("%s closed (reason: %s) in %.3fs, opened: %s" % (self.db_path, reason, time.time() - s, len(opened_dbs))) - self.connect_lock.release() - return True # Gets a cursor object to database # Return: Cursor class def getCursor(self): if not self.conn: self.connect() - - cur = DbCursor(self) - return cur - - def getSharedCursor(self): - if not self.conn: - self.connect() - return self.cur + return DbCursor(self.conn, self) # Get the table version # Return: Table version or None if not exist def getTableVersion(self, table_name): + """if not self.table_names: # Get existing table names + res = self.cur.execute("SELECT name FROM sqlite_master WHERE type='table'") + self.table_names = [row["name"] for row in res] + if table_name not in self.table_names: + return False + + else:""" if not self.db_keyvalues: # Get db keyvalues try: - res = self.execute("SELECT * FROM keyvalue WHERE json_id=0") # json_id = 0 is internal keyvalues - except sqlite3.OperationalError as err: # Table not exist - self.log.debug("Query table version error: %s" % err) + res = self.cur.execute("SELECT * FROM keyvalue WHERE json_id=0") # json_id = 0 is internal keyvalues + except sqlite3.OperationalError, err: # Table not exist + self.log.debug("Query error: %s" % err) return False for row in res: @@ -278,8 +112,9 @@ class Db(object): def checkTables(self): s = time.time() changed_tables = [] + cur = self.getCursor() - cur = self.getSharedCursor() + cur.execute("BEGIN") # Check internal tables # Check keyvalue table @@ -287,115 +122,85 @@ class Db(object): ["keyvalue_id", "INTEGER PRIMARY KEY AUTOINCREMENT"], ["key", "TEXT"], ["value", "INTEGER"], - ["json_id", "INTEGER"], + ["json_id", "INTEGER REFERENCES json (json_id)"], ], [ "CREATE UNIQUE INDEX key_id ON keyvalue(json_id, key)" ], version=self.schema["version"]) if changed: changed_tables.append("keyvalue") - # Create json table if no custom one defined - if "json" not in 
self.schema.get("tables", {}): - if self.schema["version"] == 1: - changed = cur.needTable("json", [ - ["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"], - ["path", "VARCHAR(255)"] - ], [ - "CREATE UNIQUE INDEX path ON json(path)" - ], version=self.schema["version"]) - elif self.schema["version"] == 2: - changed = cur.needTable("json", [ - ["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"], - ["directory", "VARCHAR(255)"], - ["file_name", "VARCHAR(255)"] - ], [ - "CREATE UNIQUE INDEX path ON json(directory, file_name)" - ], version=self.schema["version"]) - elif self.schema["version"] == 3: - changed = cur.needTable("json", [ - ["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"], - ["site", "VARCHAR(255)"], - ["directory", "VARCHAR(255)"], - ["file_name", "VARCHAR(255)"] - ], [ - "CREATE UNIQUE INDEX path ON json(directory, site, file_name)" - ], version=self.schema["version"]) - if changed: - changed_tables.append("json") + # Check json table + if self.schema["version"] == 1: + changed = cur.needTable("json", [ + ["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"], + ["path", "VARCHAR(255)"] + ], [ + "CREATE UNIQUE INDEX path ON json(path)" + ], version=self.schema["version"]) + else: + changed = cur.needTable("json", [ + ["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"], + ["directory", "VARCHAR(255)"], + ["file_name", "VARCHAR(255)"] + ], [ + "CREATE UNIQUE INDEX path ON json(directory, file_name)" + ], version=self.schema["version"]) + if changed: + changed_tables.append("json") # Check schema tables - for table_name, table_settings in self.schema.get("tables", {}).items(): - try: - indexes = table_settings.get("indexes", []) - version = table_settings.get("schema_changed", 0) - changed = cur.needTable( - table_name, table_settings["cols"], - indexes, version=version - ) - if changed: - changed_tables.append(table_name) - except Exception as err: - self.log.error("Error creating table %s: %s" % (table_name, Debug.formatException(err))) - raise DbTableError(err, 
table_name) + for table_name, table_settings in self.schema["tables"].items(): + changed = cur.needTable( + table_name, table_settings["cols"], + table_settings["indexes"], version=table_settings["schema_changed"] + ) + if changed: + changed_tables.append(table_name) + cur.execute("COMMIT") self.log.debug("Db check done in %.3fs, changed tables: %s" % (time.time() - s, changed_tables)) if changed_tables: self.db_keyvalues = {} # Refresh table version cache return changed_tables - # Update json file to db + # Load json file to db # Return: True if matched - def updateJson(self, file_path, file=None, cur=None): + def loadJson(self, file_path, file=None, cur=None): if not file_path.startswith(self.db_dir): return False # Not from the db dir: Skipping - relative_path = file_path[len(self.db_dir):] # File path realative to db file - + relative_path = re.sub("^%s" % self.db_dir, "", file_path) # File path realative to db file # Check if filename matches any of mappings in schema matched_maps = [] for match, map_settings in self.schema["maps"].items(): - try: - if SafeRe.match(match, relative_path): - matched_maps.append(map_settings) - except SafeRe.UnsafePatternError as err: - self.log.error(err) + if re.match(match, relative_path): + matched_maps.append(map_settings) # No match found for the file if not matched_maps: return False # Load the json file - try: - if file is None: # Open file is not file object passed - file = open(file_path, "rb") - - if file is False: # File deleted - data = {} - else: - if file_path.endswith("json.gz"): - file = helper.limitedGzipFile(fileobj=file) - - if sys.version_info.major == 3 and sys.version_info.minor < 6: - data = json.loads(file.read().decode("utf8")) - else: - data = json.load(file) - except Exception as err: - self.log.debug("Json file %s load error: %s" % (file_path, err)) - data = {} + if not file: + file = open(file_path) + data = json.load(file) # No cursor specificed if not cur: - cur = self.getSharedCursor() + cur = 
self.getCursor() + cur.execute("BEGIN") cur.logging = False + commit_after_done = True + else: + commit_after_done = False - # Row for current json file if required - if not data or [dbmap for dbmap in matched_maps if "to_keyvalue" in dbmap or "to_table" in dbmap]: - json_row = cur.getJsonRow(relative_path) + # Row for current json file + json_row = cur.getJsonRow(relative_path) # Check matched mappings in schema - for dbmap in matched_maps: + for map in matched_maps: # Insert non-relational key values - if dbmap.get("to_keyvalue"): + if map.get("to_keyvalue"): # Get current values res = cur.execute("SELECT * FROM keyvalue WHERE json_id = ?", (json_row["json_id"],)) current_keyvalue = {} @@ -404,7 +209,7 @@ class Db(object): current_keyvalue[row["key"]] = row["value"] current_keyvalue_id[row["key"]] = row["keyvalue_id"] - for key in dbmap["to_keyvalue"]: + for key in map["to_keyvalue"]: if key not in current_keyvalue: # Keyvalue not exist yet in the db cur.execute( "INSERT INTO keyvalue ?", @@ -416,21 +221,15 @@ class Db(object): (data.get(key), current_keyvalue_id[key]) ) - # Insert data to json table for easier joins - if dbmap.get("to_json_table"): - directory, file_name = re.match("^(.*?)/*([^/]*)$", relative_path).groups() - data_json_row = dict(cur.getJsonRow(directory + "/" + dbmap.get("file_name", file_name))) - changed = False - for key in dbmap["to_json_table"]: - if data.get(key) != data_json_row.get(key): - changed = True - if changed: - # Add the custom col values - data_json_row.update({key: val for key, val in data.items() if key in dbmap["to_json_table"]}) - cur.execute("INSERT OR REPLACE INTO json ?", data_json_row) + """ + for key in map.get("to_keyvalue", []): + cur.execute("INSERT OR REPLACE INTO keyvalue ?", + {"key": key, "value": data.get(key), "json_id": json_row["json_id"]} + ) + """ # Insert data to tables - for table_settings in dbmap.get("to_table", []): + for table_settings in map.get("to_table", []): if isinstance(table_settings, 
dict): # Custom settings table_name = table_settings["table"] # Table name to insert datas node = table_settings.get("node", table_name) # Node keyname in data json file @@ -446,38 +245,34 @@ class Db(object): import_cols = None replaces = None - # Fill import cols from table cols - if not import_cols: - import_cols = set([item[0] for item in self.schema["tables"][table_name]["cols"]]) - cur.execute("DELETE FROM %s WHERE json_id = ?" % table_name, (json_row["json_id"],)) if node not in data: continue if key_col: # Map as dict - for key, val in data[node].items(): + for key, val in data[node].iteritems(): if val_col: # Single value cur.execute( "INSERT OR REPLACE INTO %s ?" % table_name, {key_col: key, val_col: val, "json_id": json_row["json_id"]} ) else: # Multi value - if type(val) is dict: # Single row + if isinstance(val, dict): # Single row row = val if import_cols: - row = {key: row[key] for key in row if key in import_cols} # Filter row by import_cols + row = {key: row[key] for key in import_cols} # Filter row by import_cols row[key_col] = key # Replace in value if necessary if replaces: - for replace_key, replace in replaces.items(): + for replace_key, replace in replaces.iteritems(): if replace_key in row: - for replace_from, replace_to in replace.items(): + for replace_from, replace_to in replace.iteritems(): row[replace_key] = row[replace_key].replace(replace_from, replace_to) row["json_id"] = json_row["json_id"] cur.execute("INSERT OR REPLACE INTO %s ?" % table_name, row) - elif type(val) is list: # Multi row + else: # Multi row for row in val: row[key_col] = key row["json_id"] = json_row["json_id"] @@ -485,15 +280,10 @@ class Db(object): else: # Map as list for row in data[node]: row["json_id"] = json_row["json_id"] - if import_cols: - row = {key: row[key] for key in row if key in import_cols} # Filter row by import_cols cur.execute("INSERT OR REPLACE INTO %s ?" 
% table_name, row) - # Cleanup json row - if not data: - self.log.debug("Cleanup json row for %s" % file_path) - cur.execute("DELETE FROM json WHERE json_id = %s" % json_row["json_id"]) - + if commit_after_done: + cur.execute("COMMIT") return True @@ -507,13 +297,15 @@ if __name__ == "__main__": dbjson.collect_stats = True dbjson.checkTables() cur = dbjson.getCursor() + cur.execute("BEGIN") cur.logging = False - dbjson.updateJson("data/users/content.json", cur=cur) + dbjson.loadJson("data/users/content.json", cur=cur) for user_dir in os.listdir("data/users"): if os.path.isdir("data/users/%s" % user_dir): - dbjson.updateJson("data/users/%s/data.json" % user_dir, cur=cur) + dbjson.loadJson("data/users/%s/data.json" % user_dir, cur=cur) # print ".", cur.logging = True - print("Done in %.3fs" % (time.time() - s)) + cur.execute("COMMIT") + print "Done in %.3fs" % (time.time() - s) for query, stats in sorted(dbjson.query_stats.items()): - print("-", query, stats) + print "-", query, stats diff --git a/src/Db/DbCursor.py b/src/Db/DbCursor.py index acb8846d..a34f9157 100644 --- a/src/Db/DbCursor.py +++ b/src/Db/DbCursor.py @@ -1,119 +1,51 @@ import time import re -from util import helper # Special sqlite cursor class DbCursor: - def __init__(self, db): + def __init__(self, conn, db): + self.conn = conn self.db = db + self.cursor = conn.cursor() self.logging = False - def quoteValue(self, value): - if type(value) is int: - return str(value) - else: - return "'%s'" % value.replace("'", "''") - - def parseQuery(self, query, params): - query_type = query.split(" ", 1)[0].upper() - if isinstance(params, dict) and "?" in query: # Make easier select and insert by allowing dict params - if query_type in ("SELECT", "DELETE", "UPDATE"): + def execute(self, query, params=None): + if isinstance(params, dict): # Make easier select and insert by allowing dict params + if query.startswith("SELECT") or query.startswith("DELETE"): # Convert param dict to SELECT * FROM table WHERE key = ? 
AND key2 = ? format query_wheres = [] values = [] for key, value in params.items(): if type(value) is list: - if key.startswith("not__"): - field = key.replace("not__", "") - operator = "NOT IN" - else: - field = key - operator = "IN" - if len(value) > 100: - # Embed values in query to avoid "too many SQL variables" error - query_values = ",".join(map(helper.sqlquote, value)) - else: - query_values = ",".join(["?"] * len(value)) - values += value - query_wheres.append( - "%s %s (%s)" % - (field, operator, query_values) - ) + query_wheres.append(key+" IN ("+",".join(["?"]*len(value))+")") + values += value else: - if key.startswith("not__"): - query_wheres.append(key.replace("not__", "") + " != ?") - elif key.endswith("__like"): - query_wheres.append(key.replace("__like", "") + " LIKE ?") - elif key.endswith(">"): - query_wheres.append(key.replace(">", "") + " > ?") - elif key.endswith("<"): - query_wheres.append(key.replace("<", "") + " < ?") - else: - query_wheres.append(key + " = ?") + query_wheres.append(key+" = ?") values.append(value) wheres = " AND ".join(query_wheres) - if wheres == "": - wheres = "1" - query = re.sub("(.*)[?]", "\\1 %s" % wheres, query) # Replace the last ? + query = query.replace("?", wheres) params = values else: # Convert param dict to INSERT INTO table (key, key2) VALUES (?, ?) format keys = ", ".join(params.keys()) values = ", ".join(['?' for key in params.keys()]) - keysvalues = "(%s) VALUES (%s)" % (keys, values) - query = re.sub("(.*)[?]", "\\1%s" % keysvalues, query) # Replace the last ? 
+ query = query.replace("?", "(%s) VALUES (%s)" % (keys, values)) params = tuple(params.values()) - elif isinstance(params, dict) and ":" in query: - new_params = dict() - values = [] - for key, value in params.items(): - if type(value) is list: - for idx, val in enumerate(value): - new_params[key + "__" + str(idx)] = val - new_names = [":" + key + "__" + str(idx) for idx in range(len(value))] - query = re.sub(r":" + re.escape(key) + r"([)\s]|$)", "(%s)%s" % (", ".join(new_names), r"\1"), query) - else: - new_params[key] = value + s = time.time() + # if query == "COMMIT": self.logging = True # Turn logging back on transaction commit - params = new_params - return query, params - - def execute(self, query, params=None): - query = query.strip() - while self.db.progress_sleeping or self.db.commiting: - time.sleep(0.1) - - self.db.last_query_time = time.time() - - query, params = self.parseQuery(query, params) - - cursor = self.db.getConn().cursor() - self.db.cursors.add(cursor) - if self.db.lock.locked(): - self.db.log.debug("Locked for %.3fs" % (time.time() - self.db.lock.time_lock)) - - try: - s = time.time() - self.db.lock.acquire(True) - if query.upper().strip("; ") == "VACUUM": - self.db.commit("vacuum called") - if params: - res = cursor.execute(query, params) - else: - res = cursor.execute(query) - finally: - self.db.lock.release() - - taken_query = time.time() - s - if self.logging or taken_query > 1: - if params: # Query has parameters - self.db.log.debug("Query: " + query + " " + str(params) + " (Done in %.4f)" % (time.time() - s)) - else: - self.db.log.debug("Query: " + query + " (Done in %.4f)" % (time.time() - s)) + if params: # Query has parameters + res = self.cursor.execute(query, params) + if self.logging: + self.db.log.debug((query.replace("?", "%s") % params) + " (Done in %.4f)" % (time.time() - s)) + else: + res = self.cursor.execute(query) + if self.logging: + self.db.log.debug(query + " (Done in %.4f)" % (time.time() - s)) # Log query stats if 
self.db.collect_stats: @@ -122,59 +54,23 @@ class DbCursor: self.db.query_stats[query]["call"] += 1 self.db.query_stats[query]["time"] += time.time() - s - query_type = query.split(" ", 1)[0].upper() - is_update_query = query_type in ["UPDATE", "DELETE", "INSERT", "CREATE"] - if not self.db.need_commit and is_update_query: - self.db.need_commit = True - - if is_update_query: - return cursor - else: - return res - - def executemany(self, query, params): - while self.db.progress_sleeping or self.db.commiting: - time.sleep(0.1) - - self.db.last_query_time = time.time() - - s = time.time() - cursor = self.db.getConn().cursor() - self.db.cursors.add(cursor) - - try: - self.db.lock.acquire(True) - cursor.executemany(query, params) - finally: - self.db.lock.release() - - taken_query = time.time() - s - if self.logging or taken_query > 0.1: - self.db.log.debug("Execute many: %s (Done in %.4f)" % (query, taken_query)) - - self.db.need_commit = True - - return cursor - - # Creates on updates a database row without incrementing the rowid - def insertOrUpdate(self, table, query_sets, query_wheres, oninsert={}): - sql_sets = ["%s = :%s" % (key, key) for key in query_sets.keys()] - sql_wheres = ["%s = :%s" % (key, key) for key in query_wheres.keys()] - - params = query_sets - params.update(query_wheres) - res = self.execute( - "UPDATE %s SET %s WHERE %s" % (table, ", ".join(sql_sets), " AND ".join(sql_wheres)), - params - ) - if res.rowcount == 0: - params.update(oninsert) # Add insert-only fields - self.execute("INSERT INTO %s ?" 
% table, params) + # if query == "BEGIN": self.logging = False # Turn logging off on transaction commit + return res # Create new table # Return: True on success def createTable(self, table, cols): # TODO: Check current structure + """table_changed = False + res = c.execute("PRAGMA table_info(%s)" % table) + if res: + for row in res: + print row["name"], row["type"], cols[row["name"]] + print row + else: + table_changed = True + + if table_changed: # Table structure changed, drop and create again""" self.execute("DROP TABLE IF EXISTS %s" % table) col_definitions = [] for col_name, col_type in cols: @@ -186,10 +82,8 @@ class DbCursor: # Create indexes on table # Return: True on success def createIndexes(self, table, indexes): + # indexes.append("CREATE INDEX %s_id ON %s(%s_id)" % (table, table, table)) # Primary key index for index in indexes: - if not index.strip().upper().startswith("CREATE"): - self.db.log.error("Index command should start with CREATE: %s" % index) - continue self.execute(index) # Create table if not exist @@ -197,7 +91,7 @@ class DbCursor: def needTable(self, table, cols, indexes=None, version=1): current_version = self.db.getTableVersion(table) if int(current_version) < int(version): # Table need update or not extis - self.db.log.debug("Table %s outdated...version: %s need: %s, rebuilding..." % (table, current_version, version)) + self.db.log.info("Table %s outdated...version: %s need: %s, rebuilding..." % (table, current_version, version)) self.createTable(table, cols) if indexes: self.createIndexes(table, indexes) @@ -214,33 +108,20 @@ class DbCursor: def getJsonRow(self, file_path): directory, file_name = re.match("^(.*?)/*([^/]*)$", file_path).groups() if self.db.schema["version"] == 1: - # One path field res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"path": file_path}) row = res.fetchone() if not row: # No row yet, create it self.execute("INSERT INTO json ?", {"path": file_path}) res = self.execute("SELECT * FROM json WHERE ? 
LIMIT 1", {"path": file_path}) row = res.fetchone() - elif self.db.schema["version"] == 2: - # Separate directory, file_name (easier join) + else: res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"directory": directory, "file_name": file_name}) row = res.fetchone() if not row: # No row yet, create it self.execute("INSERT INTO json ?", {"directory": directory, "file_name": file_name}) res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"directory": directory, "file_name": file_name}) row = res.fetchone() - elif self.db.schema["version"] == 3: - # Separate site, directory, file_name (for merger sites) - site_address, directory = re.match("^([^/]*)/(.*)$", directory).groups() - res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"site": site_address, "directory": directory, "file_name": file_name}) - row = res.fetchone() - if not row: # No row yet, create it - self.execute("INSERT INTO json ?", {"site": site_address, "directory": directory, "file_name": file_name}) - res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"site": site_address, "directory": directory, "file_name": file_name}) - row = res.fetchone() - else: - raise Exception("Dbschema version %s not supported" % self.db.schema.get("version")) return row def close(self): - pass + self.cursor.close() diff --git a/src/Db/DbQuery.py b/src/Db/DbQuery.py deleted file mode 100644 index 3fb5ef73..00000000 --- a/src/Db/DbQuery.py +++ /dev/null @@ -1,46 +0,0 @@ -import re - - -# Parse and modify sql queries -class DbQuery: - def __init__(self, query): - self.setQuery(query.strip()) - - # Split main parts of query - def parseParts(self, query): - parts = re.split("(SELECT|FROM|WHERE|ORDER BY|LIMIT)", query) - parts = [_f for _f in parts if _f] # Remove empty parts - parts = [s.strip() for s in parts] # Remove whitespace - return dict(list(zip(parts[0::2], parts[1::2]))) - - # Parse selected fields SELECT ... 
FROM - def parseFields(self, query_select): - fields = re.findall("([^,]+) AS ([^,]+)", query_select) - return {key: val.strip() for val, key in fields} - - # Parse query conditions WHERE ... - def parseWheres(self, query_where): - if " AND " in query_where: - return query_where.split(" AND ") - elif query_where: - return [query_where] - else: - return [] - - # Set the query - def setQuery(self, query): - self.parts = self.parseParts(query) - self.fields = self.parseFields(self.parts["SELECT"]) - self.wheres = self.parseWheres(self.parts.get("WHERE", "")) - - # Convert query back to string - def __str__(self): - query_parts = [] - for part_name in ["SELECT", "FROM", "WHERE", "ORDER BY", "LIMIT"]: - if part_name == "WHERE" and self.wheres: - query_parts.append("WHERE") - query_parts.append(" AND ".join(self.wheres)) - elif part_name in self.parts: - query_parts.append(part_name) - query_parts.append(self.parts[part_name]) - return "\n".join(query_parts) diff --git a/src/Db/__init__.py b/src/Db/__init__.py index e69de29b..1f7f580c 100644 --- a/src/Db/__init__.py +++ b/src/Db/__init__.py @@ -0,0 +1,2 @@ +from Db import Db +from DbCursor import DbCursor \ No newline at end of file diff --git a/src/Debug/Debug.py b/src/Debug/Debug.py index 0ec42615..f3b54fd7 100644 --- a/src/Debug/Debug.py +++ b/src/Debug/Debug.py @@ -1,172 +1,40 @@ import sys import os -import re -from Config import config +import traceback # Non fatal exception class Notify(Exception): - def __init__(self, message=None): - if message: - self.message = message + def __init__(self, message): + self.message = message def __str__(self): return self.message -# Gevent greenlet.kill accept Exception type -def createNotifyType(message): - return type("Notify", (Notify, ), {"message": message}) - - -def formatExceptionMessage(err): - err_type = err.__class__.__name__ - if err.args: - err_message = err.args[-1] - else: - err_message = err.__str__() - return "%s: %s" % (err_type, err_message) - - 
-python_lib_dirs = [path.replace("\\", "/") for path in sys.path if re.sub(r".*[\\/]", "", path) in ("site-packages", "dist-packages")] -python_lib_dirs.append(os.path.dirname(os.__file__).replace("\\", "/")) # TODO: check if returns the correct path for PyPy - -root_dir = os.path.realpath(os.path.dirname(__file__) + "/../../") -root_dir = root_dir.replace("\\", "/") - - -def formatTraceback(items, limit=None, fold_builtin=True): - back = [] - i = 0 - prev_file_title = "" - is_prev_builtin = False - - for path, line in items: - i += 1 - is_last = i == len(items) - path = path.replace("\\", "/") - - if path.startswith("src/gevent/"): - file_title = "/" + path[len("src/gevent/"):] - is_builtin = True - is_skippable_builtin = False - elif path in ("", ""): - file_title = "(importlib)" - is_builtin = True - is_skippable_builtin = True - else: - is_skippable_builtin = False - for base in python_lib_dirs: - if path.startswith(base + "/"): - file_title = path[len(base + "/"):] - module_name, *tail = file_title.split("/") - if module_name.endswith(".py"): - module_name = module_name[:-3] - file_title = "/".join(["<%s>" % module_name] + tail) - is_builtin = True - break - else: - is_builtin = False - for base in (root_dir + "/src", root_dir + "/plugins", root_dir): - if path.startswith(base + "/"): - file_title = path[len(base + "/"):] - break - else: - # For unknown paths, do our best to hide absolute path - file_title = path - for needle in ("/zeronet/", "/core/"): - if needle in file_title.lower(): - file_title = "?/" + file_title[file_title.lower().rindex(needle) + len(needle):] - - # Path compression: A/AB/ABC/X/Y.py -> ABC/X/Y.py - # E.g.: in 'Db/DbCursor.py' the directory part is unnecessary - if not file_title.startswith("/"): - prev_part = "" - for i, part in enumerate(file_title.split("/") + [""]): - if not part.startswith(prev_part): - break - prev_part = part - file_title = "/".join(file_title.split("/")[i - 1:]) - - if is_skippable_builtin and fold_builtin: - 
pass - elif is_builtin and is_prev_builtin and not is_last and fold_builtin: - if back[-1] != "...": - back.append("...") - else: - if file_title == prev_file_title: - back.append("%s" % line) - else: - back.append("%s line %s" % (file_title, line)) - - prev_file_title = file_title - is_prev_builtin = is_builtin - - if limit and i >= limit: - back.append("...") - break - return back - - def formatException(err=None, format="text"): - import traceback if type(err) == Notify: return err - elif type(err) == tuple and err and err[0] is not None: # Passed trackeback info - exc_type, exc_obj, exc_tb = err - err = None - else: # No trackeback info passed, get latest - exc_type, exc_obj, exc_tb = sys.exc_info() - + exc_type, exc_obj, exc_tb = sys.exc_info() if not err: - if hasattr(err, "message"): - err = exc_obj.message - else: - err = exc_obj - - tb = formatTraceback([[frame[0], frame[1]] for frame in traceback.extract_tb(exc_tb)]) + err = exc_obj.message + tb = [] + for frame in traceback.extract_tb(exc_tb): + path, line, function, text = frame + file = os.path.split(path)[1] + tb.append("%s line %s" % (file, line)) if format == "html": - return "%s: %s
    %s" % (repr(err), err, " > ".join(tb)) + return "%s: %s
    %s" % (exc_type.__name__, err, " > ".join(tb)) else: return "%s: %s in %s" % (exc_type.__name__, err, " > ".join(tb)) -def formatStack(limit=None): - import inspect - tb = formatTraceback([[frame[1], frame[2]] for frame in inspect.stack()[1:]], limit=limit) - return " > ".join(tb) - - -# Test if gevent eventloop blocks -import logging -import gevent -import time - - -num_block = 0 - - -def testBlock(): - global num_block - logging.debug("Gevent block checker started") - last_time = time.time() - while 1: - time.sleep(1) - if time.time() - last_time > 1.1: - logging.debug("Gevent block detected: %.3fs" % (time.time() - last_time - 1)) - num_block += 1 - last_time = time.time() - - -gevent.spawn(testBlock) - - if __name__ == "__main__": try: - print(1 / 0) - except Exception as err: - print(type(err).__name__) - print("1/0 error: %s" % formatException(err)) + print 1 / 0 + except Exception, err: + print type(err).__name__ + print "1/0 error: %s" % formatException(err) def loadJson(): json.loads("Errr") @@ -174,13 +42,13 @@ if __name__ == "__main__": import json try: loadJson() - except Exception as err: - print(err) - print("Json load error: %s" % formatException(err)) + except Exception, err: + print err + print "Json load error: %s" % formatException(err) try: raise Notify("nothing...") - except Exception as err: - print("Notify: %s" % formatException(err)) + except Exception, err: + print "Notify: %s" % formatException(err) loadJson() diff --git a/src/Debug/DebugHook.py b/src/Debug/DebugHook.py index d100a3b8..68afd220 100644 --- a/src/Debug/DebugHook.py +++ b/src/Debug/DebugHook.py @@ -1,32 +1,15 @@ import sys import logging -import signal -import importlib import gevent -import gevent.hub from Config import config -from . import Debug last_error = None -def shutdown(reason="Unknown"): - logging.info("Shutting down (reason: %s)..." 
% reason) - import main - if "file_server" in dir(main): - try: - gevent.spawn(main.file_server.stop) - if "ui_server" in dir(main): - gevent.spawn(main.ui_server.stop) - except Exception as err: - print("Proper shutdown error: %s" % err) - sys.exit(0) - else: - sys.exit(0) # Store last error, ignore notify, allow manual error logging -def handleError(*args, **kwargs): +def handleError(*args): global last_error if not args: # Manual called args = sys.exc_info() @@ -35,81 +18,47 @@ def handleError(*args, **kwargs): silent = False if args[0].__name__ != "Notify": last_error = args - - if args[0].__name__ == "KeyboardInterrupt": - shutdown("Keyboard interrupt") - elif not silent and args[0].__name__ != "Notify": + if not silent and args[0].__name__ != "Notify": logging.exception("Unhandled exception") - if "greenlet.py" not in args[2].tb_frame.f_code.co_filename: # Don't display error twice - sys.__excepthook__(*args, **kwargs) + sys.__excepthook__(*args) # Ignore notify errors -def handleErrorNotify(*args, **kwargs): - err = args[0] - if err.__name__ == "KeyboardInterrupt": - shutdown("Keyboard interrupt") - elif err.__name__ != "Notify": - logging.error("Unhandled exception: %s" % Debug.formatException(args)) - sys.__excepthook__(*args, **kwargs) +def handleErrorNotify(*args): + if args[0].__name__ != "Notify": + logging.exception("Unhandled exception") + sys.__excepthook__(*args) -if config.debug: # Keep last error for /Debug +OriginalGreenlet = gevent.Greenlet + + +class ErrorhookedGreenlet(OriginalGreenlet): + def _report_error(self, exc_info): + sys.excepthook(exc_info[0], exc_info[1], exc_info[2]) + +if config.debug: sys.excepthook = handleError else: sys.excepthook = handleErrorNotify - -# Override default error handler to allow silent killing / custom logging -if "handle_error" in dir(gevent.hub.Hub): - gevent.hub.Hub._original_handle_error = gevent.hub.Hub.handle_error -else: - logging.debug("gevent.hub.Hub.handle_error not found using old gevent hooks") - 
OriginalGreenlet = gevent.Greenlet - class ErrorhookedGreenlet(OriginalGreenlet): - def _report_error(self, exc_info): - sys.excepthook(exc_info[0], exc_info[1], exc_info[2]) - - gevent.Greenlet = gevent.greenlet.Greenlet = ErrorhookedGreenlet - importlib.reload(gevent) - -def handleGreenletError(context, type, value, tb): - if context.__class__ is tuple and context[0].__class__.__name__ == "ThreadPool": - # Exceptions in ThreadPool will be handled in the main Thread - return None - - if isinstance(value, str): - # Cython can raise errors where the value is a plain string - # e.g., AttributeError, "_semaphore.Semaphore has no attr", - value = type(value) - - if not issubclass(type, gevent.get_hub().NOT_ERROR): - sys.excepthook(type, value, tb) - -gevent.get_hub().handle_error = handleGreenletError - -try: - signal.signal(signal.SIGTERM, lambda signum, stack_frame: shutdown("SIGTERM")) -except Exception as err: - logging.debug("Error setting up SIGTERM watcher: %s" % err) - +gevent.Greenlet = gevent.greenlet.Greenlet = ErrorhookedGreenlet +reload(gevent) if __name__ == "__main__": import time from gevent import monkey monkey.patch_all(thread=False, ssl=False) - from . import Debug + import Debug - def sleeper(num): - print("started", num) + def sleeper(): + print "started" time.sleep(3) - raise Exception("Error") - print("stopped", num) - thread1 = gevent.spawn(sleeper, 1) - thread2 = gevent.spawn(sleeper, 2) + print "stopped" + thread1 = gevent.spawn(sleeper) + thread2 = gevent.spawn(sleeper) time.sleep(1) - print("killing...") - thread1.kill(exception=Debug.Notify("Worker stopped")) - #thread2.throw(Debug.Notify("Throw")) - print("killed") - gevent.joinall([thread1,thread2]) + print "killing..." 
+ thread1.throw(Exception("Hello")) + thread2.throw(Debug.Notify("Throw")) + print "killed" diff --git a/src/Debug/DebugLock.py b/src/Debug/DebugLock.py deleted file mode 100644 index 9cf22520..00000000 --- a/src/Debug/DebugLock.py +++ /dev/null @@ -1,24 +0,0 @@ -import time -import logging - -import gevent.lock - -from Debug import Debug - - -class DebugLock: - def __init__(self, log_after=0.01, name="Lock"): - self.name = name - self.log_after = log_after - self.lock = gevent.lock.Semaphore(1) - self.release = self.lock.release - - def acquire(self, *args, **kwargs): - s = time.time() - res = self.lock.acquire(*args, **kwargs) - time_taken = time.time() - s - if time_taken >= self.log_after: - logging.debug("%s: Waited %.3fs after called by %s" % - (self.name, time_taken, Debug.formatStack()) - ) - return res diff --git a/src/Debug/DebugMedia.py b/src/Debug/DebugMedia.py index a892dc56..27256d09 100644 --- a/src/Debug/DebugMedia.py +++ b/src/Debug/DebugMedia.py @@ -3,7 +3,6 @@ import subprocess import re import logging import time -import functools from Config import config from util import helper @@ -11,17 +10,7 @@ from util import helper # Find files with extension in path def findfiles(path, find_ext): - def sorter(f1, f2): - f1 = f1[0].replace(path, "") - f2 = f2[0].replace(path, "") - if f1 == "": - return 1 - elif f2 == "": - return -1 - else: - return helper.cmp(f1.lower(), f2.lower()) - - for root, dirs, files in sorted(os.walk(path, topdown=False), key=functools.cmp_to_key(sorter)): + for root, dirs, files in os.walk(path, topdown=False): for file in sorted(files): file_path = root + "/" + file file_ext = file.split(".")[-1] @@ -45,7 +34,6 @@ def findCoffeescriptCompiler(): # Generates: all.js: merge *.js, compile coffeescript, all.css: merge *.css, vendor prefix features def merge(merged_path): - merged_path = merged_path.replace("\\", "/") merge_dir = os.path.dirname(merged_path) s = time.time() ext = merged_path.split(".")[-1] @@ -62,69 +50,66 @@ def 
merge(merged_path): changed = {} for file_path in findfiles(merge_dir, find_ext): - if os.path.getmtime(file_path) > merged_mtime + 1: + if os.path.getmtime(file_path) > merged_mtime: changed[file_path] = True if not changed: return # Assets not changed, nothing to do - old_parts = {} if os.path.isfile(merged_path): # Find old parts to avoid unncessary recompile - merged_old = open(merged_path, "rb").read() - for match in re.findall(rb"(/\* ---- (.*?) ---- \*/(.*?)(?=/\* ----|$))", merged_old, re.DOTALL): - old_parts[match[1].decode()] = match[2].strip(b"\n\r") + merged_old = open(merged_path, "rb").read().decode("utf8") + old_parts = {} + for match in re.findall("(/\* ---- (.*?) ---- \*/(.*?)(?=/\* ----|$))", merged_old, re.DOTALL): + old_parts[match[1]] = match[2].strip("\n\r") - logging.debug("Merging %s (changed: %s, old parts: %s)" % (merged_path, changed, len(old_parts))) # Merge files parts = [] s_total = time.time() for file_path in findfiles(merge_dir, find_ext): - file_relative_path = file_path.replace(merge_dir + "/", "") - parts.append(b"\n/* ---- %s ---- */\n\n" % file_relative_path.encode("utf8")) + parts.append("\n\n/* ---- %s ---- */\n\n" % file_path) if file_path.endswith(".coffee"): # Compile coffee script - if file_path in changed or file_relative_path not in old_parts: # Only recompile if changed or its not compiled before + if file_path in changed or file_path not in old_parts: # Only recompile if changed or its not compiled before if config.coffeescript_compiler is None: config.coffeescript_compiler = findCoffeescriptCompiler() if not config.coffeescript_compiler: - logging.error("No coffeescript compiler defined, skipping compiling %s" % merged_path) + logging.error("No coffeescript compiler definied, skipping compiling %s" % merged_path) return False # No coffeescript compiler, skip this file # Replace / with os separators and escape it - file_path_escaped = helper.shellquote(file_path.replace("/", os.path.sep)) + file_path_escaped = 
helper.shellquote(os.path.join(*file_path.split("/"))) if "%s" in config.coffeescript_compiler: # Replace %s with coffeescript file - command = config.coffeescript_compiler.replace("%s", file_path_escaped) + command = config.coffeescript_compiler % file_path_escaped else: # Put coffeescript file to end command = config.coffeescript_compiler + " " + file_path_escaped # Start compiling s = time.time() compiler = subprocess.Popen(command, shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE) - out = compiler.stdout.read() + out = compiler.stdout.read().decode("utf8") compiler.wait() logging.debug("Running: %s (Done in %.2fs)" % (command, time.time() - s)) # Check errors - if out and out.startswith(b"("): # No error found + if out and out.startswith("("): # No error found parts.append(out) else: # Put error message in place of source code error = out - logging.error("%s Compile error: %s" % (file_relative_path, error)) - error_escaped = re.escape(error).replace(b"\n", b"\\n").replace(br"\\n", br"\n") + logging.error("%s Compile error: %s" % (file_path, error)) parts.append( - b"alert('%s compile error: %s');" % - (file_relative_path.encode(), error_escaped) + "alert('%s compile error: %s');" % + (file_path, re.escape(error).replace("\n", "\\n").replace(r"\\n", r"\n")) ) else: # Not changed use the old_part - parts.append(old_parts[file_relative_path]) + parts.append(old_parts[file_path]) else: # Add to parts - parts.append(open(file_path, "rb").read()) + parts.append(open(file_path).read().decode("utf8")) - merged = b"\n".join(parts) + merged = u"\n".join(parts) if ext == "css": # Vendor prefix css from lib.cssvendor import cssvendor merged = cssvendor.prefix(merged) - merged = merged.replace(b"\r", b"") - open(merged_path, "wb").write(merged) + merged = merged.replace("\r", "") + open(merged_path, "wb").write(merged.encode("utf8")) logging.debug("Merged %s (%.2fs)" % (merged_path, time.time() - s_total)) diff --git a/src/Debug/DebugReloader.py 
b/src/Debug/DebugReloader.py index 482c7921..1c643f6e 100644 --- a/src/Debug/DebugReloader.py +++ b/src/Debug/DebugReloader.py @@ -1,69 +1,48 @@ import logging import time -import os +import threading from Config import config -if config.debug and config.action == "main": +if config.debug: # Only load pyfilesytem if using debug mode try: - import watchdog - import watchdog.observers - import watchdog.events - logging.debug("Watchdog fs listener detected, source code autoreload enabled") - enabled = True - except Exception as err: - logging.debug("Watchdog fs listener could not be loaded: %s" % err) - enabled = False + from fs.osfs import OSFS + pyfilesystem = OSFS("src") + pyfilesystem_plugins = OSFS("plugins") + logging.debug("Pyfilesystem detected, source code autoreload enabled") + except Exception, err: + pyfilesystem = False else: - enabled = False + pyfilesystem = False class DebugReloader: - def __init__(self, paths=None): - if not paths: - paths = ["src", "plugins", config.data_dir + "/__plugins__"] - self.log = logging.getLogger("DebugReloader") + + def __init__(self, callback, directory="/"): self.last_chaged = 0 - self.callbacks = [] - if enabled: - self.observer = watchdog.observers.Observer() - event_handler = watchdog.events.FileSystemEventHandler() - event_handler.on_modified = event_handler.on_deleted = self.onChanged - event_handler.on_created = event_handler.on_moved = self.onChanged - for path in paths: - if not os.path.isdir(path): - continue - self.log.debug("Adding autoreload: %s" % path) - self.observer.schedule(event_handler, path, recursive=True) - self.observer.start() + if pyfilesystem: + self.directory = directory + self.callback = callback + logging.debug("Adding autoreload: %s, cb: %s" % (directory, callback)) + thread = threading.Thread(target=self.addWatcher) + thread.daemon = True + thread.start() - def addCallback(self, f): - self.callbacks.append(f) - - def onChanged(self, evt): - path = evt.src_path - ext = path.rsplit(".", 
1)[-1] - if ext not in ["py", "json"] or "Test" in path or time.time() - self.last_chaged < 1.0: - return False - self.last_chaged = time.time() - if os.path.isfile(path): - time_modified = os.path.getmtime(path) - else: - time_modified = 0 - self.log.debug("File changed: %s reloading source code (modified %.3fs ago)" % (evt, time.time() - time_modified)) - if time.time() - time_modified > 5: # Probably it's just an attribute change, ignore it - return False + def addWatcher(self, recursive=True): + try: + time.sleep(1) # Wait for .pyc compiles + pyfilesystem.add_watcher(self.changed, path=self.directory, events=None, recursive=recursive) + pyfilesystem_plugins.add_watcher(self.changed, path=self.directory, events=None, recursive=recursive) + except Exception, err: + print "File system watcher failed: %s (on linux pyinotify not gevent compatible yet :( )" % err + def changed(self, evt): + if ( + not evt.path or "%s/" % config.data_dir in evt.path or + not evt.path.endswith("py") or + time.time() - self.last_chaged < 1 + ): + return False # Ignore *.pyc changes and no reload within 1 sec time.sleep(0.1) # Wait for lock release - for callback in self.callbacks: - try: - callback() - except Exception as err: - self.log.exception(err) - - def stop(self): - if enabled: - self.observer.stop() - self.log.debug("Stopped autoreload observer") - -watcher = DebugReloader() + self.callback() + self.last_chaged = time.time() diff --git a/src/Debug/__init__.py b/src/Debug/__init__.py index e69de29b..8632f92f 100644 --- a/src/Debug/__init__.py +++ b/src/Debug/__init__.py @@ -0,0 +1 @@ +from DebugReloader import DebugReloader \ No newline at end of file diff --git a/src/File/FileRequest.py b/src/File/FileRequest.py index c082c378..bfe39a66 100644 --- a/src/File/FileRequest.py +++ b/src/File/FileRequest.py @@ -1,9 +1,7 @@ # Included modules import os import time -import json -import collections -import itertools +from cStringIO import StringIO # Third party modules import gevent @@ 
-11,20 +9,13 @@ import gevent from Debug import Debug from Config import config from util import RateLimit -from util import Msgpack +from util import StreamingMsgpack from util import helper -from Plugin import PluginManager -from contextlib import closing FILE_BUFF = 1024 * 512 -class RequestError(Exception): - pass - - -# Incoming requests -@PluginManager.acceptPlugins +# Request from me class FileRequest(object): __slots__ = ("server", "connection", "req_id", "sites", "log", "responded") @@ -47,8 +38,7 @@ class FileRequest(object): def response(self, msg, streaming=False): if self.responded: - if config.verbose: - self.log.debug("Req id %s already responded" % self.req_id) + self.log.debug("Req id %s already responded" % self.req_id) return if not isinstance(msg, dict): # If msg not a dict create a {"body": msg} msg = {"body": msg} @@ -60,297 +50,219 @@ class FileRequest(object): # Route file requests def route(self, cmd, req_id, params): self.req_id = req_id - # Don't allow other sites than locked - if "site" in params and self.connection.target_onion: - valid_sites = self.connection.getValidSites() - if params["site"] not in valid_sites and valid_sites != ["global"]: - self.response({"error": "Invalid site"}) - self.connection.log( - "Site lock violation: %s not in %s, target onion: %s" % - (params["site"], valid_sites, self.connection.target_onion) - ) - self.connection.badAction(5) - return False - if cmd == "update": + if cmd == "getFile": + self.actionGetFile(params) + elif cmd == "streamFile": + self.actionStreamFile(params) + elif cmd == "update": event = "%s update %s %s" % (self.connection.id, params["site"], params["inner_path"]) - # If called more than once within 15 sec only keep the last update - RateLimit.callAsync(event, max(self.connection.bad_actions, 15), self.actionUpdate, params) - else: - func_name = "action" + cmd[0].upper() + cmd[1:] - func = getattr(self, func_name, None) - if cmd not in ["getFile", "streamFile"]: # Skip IO bound 
functions - if self.connection.cpu_time > 0.5: - self.log.debug( - "Delay %s %s, cpu_time used by connection: %.3fs" % - (self.connection.ip, cmd, self.connection.cpu_time) - ) - time.sleep(self.connection.cpu_time) - if self.connection.cpu_time > 5: - self.connection.close("Cpu time: %.3fs" % self.connection.cpu_time) - s = time.time() - if func: - func(params) - else: - self.actionUnknown(cmd, params) + if not RateLimit.isAllowed(event): # There was already an update for this file in the last 10 second + self.response({"ok": "File update queued"}) + # If called more than once within 10 sec only keep the last update + RateLimit.callAsync(event, 10, self.actionUpdate, params) - if cmd not in ["getFile", "streamFile"]: - taken = time.time() - s - taken_sent = self.connection.last_sent_time - self.connection.last_send_time - self.connection.cpu_time += taken - taken_sent + elif cmd == "pex": + self.actionPex(params) + elif cmd == "listModified": + self.actionListModified(params) + elif cmd == "getHashfield": + self.actionGetHashfield(params) + elif cmd == "findHashIds": + self.actionFindHashIds(params) + elif cmd == "setHashfield": + self.actionSetHashfield(params) + elif cmd == "siteReload": + self.actionSiteReload(params) + elif cmd == "sitePublish": + self.actionSitePublish(params) + elif cmd == "ping": + self.actionPing() + else: + self.actionUnknown(cmd, params) # Update a site file request def actionUpdate(self, params): site = self.sites.get(params["site"]) - if not site or not site.isServing(): # Site unknown or not serving + if not site or not site.settings["serving"]: # Site unknown or not serving self.response({"error": "Unknown site"}) - self.connection.badAction(1) - self.connection.badAction(5) return False - - inner_path = params.get("inner_path", "") - if not inner_path.endswith("content.json"): - self.response({"error": "Only content.json update allowed"}) - self.connection.badAction(5) - return - - current_content_modified = 
site.content_manager.contents.get(inner_path, {}).get("modified", 0) - should_validate_content = True - if "modified" in params and params["modified"] <= current_content_modified: - should_validate_content = False - valid = None # Same or earlier content as we have - - body = params["body"] - if not body: # No body sent, we have to download it first - site.log.debug("Missing body from update for file %s, downloading ..." % inner_path) - peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, source="update") # Add or get peer - try: - body = peer.getFile(site.address, inner_path).read() - except Exception as err: - site.log.debug("Can't download updated file %s: %s" % (inner_path, err)) - self.response({"error": "Invalid File update: Failed to download updated file content"}) - self.connection.badAction(5) - return - - if should_validate_content: - try: - if type(body) is str: - body = body.encode() - # elif type(body) is list: - # content = json.loads(bytes(list).decode()) - content = json.loads(body.decode()) - except Exception as err: - site.log.debug("Update for %s is invalid JSON: %s" % (inner_path, err)) - self.response({"error": "File invalid JSON"}) - self.connection.badAction(5) - return - - file_uri = "%s/%s:%s" % (site.address, inner_path, content["modified"]) - - if self.server.files_parsing.get(file_uri): # Check if we already working on it - valid = None # Same file - else: - try: - valid = site.content_manager.verifyFile(inner_path, content) - except Exception as err: - site.log.debug("Update for %s is invalid: %s" % (inner_path, err)) - error = err - valid = False - + if site.settings["own"] and params["inner_path"].endswith("content.json"): + self.log.debug( + "Someone trying to push a file to own site %s, reload local %s first" % + (site.address, params["inner_path"]) + ) + changed, deleted = site.content_manager.loadContent(params["inner_path"], add_bad_files=False) + if changed or deleted: # Content.json changed locally + 
site.settings["size"] = site.content_manager.getTotalSize() # Update site size + buff = StringIO(params["body"]) + valid = site.content_manager.verifyFile(params["inner_path"], buff) if valid is True: # Valid and changed - site.log.info("Update for %s looks valid, saving..." % inner_path) - self.server.files_parsing[file_uri] = True - site.storage.write(inner_path, body) - del params["body"] + self.log.info("Update for %s looks valid, saving..." % params["inner_path"]) + buff.seek(0) + site.storage.write(params["inner_path"], buff) - site.onFileDone(inner_path) # Trigger filedone + site.onFileDone(params["inner_path"]) # Trigger filedone - # Download every changed file from peer - peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, source="update") # Add or get peer - # On complete publish to other peers - diffs = params.get("diffs", {}) - site.onComplete.once(lambda: site.publish(inner_path=inner_path, diffs=diffs, limit=6), "publish_%s" % inner_path) + if params["inner_path"].endswith("content.json"): # Download every changed file from peer + peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True) # Add or get peer + # On complete publish to other peers + site.onComplete.once(lambda: site.publish(inner_path=params["inner_path"]), "publish_%s" % params["inner_path"]) - # Load new content file and download changed files in new thread - def downloader(): - site.downloadContent(inner_path, peer=peer, diffs=params.get("diffs", {})) - del self.server.files_parsing[file_uri] + # Load new content file and download changed files in new thread + gevent.spawn( + lambda: site.downloadContent(params["inner_path"], peer=peer) + ) - gevent.spawn(downloader) - - self.response({"ok": "Thanks, file %s updated!" % inner_path}) - self.connection.goodAction() + self.response({"ok": "Thanks, file %s updated!" 
% params["inner_path"]}) elif valid is None: # Not changed - peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, source="update old") # Add or get peer + peer = site.addPeer(*params["peer"], return_peer=True) # Add or get peer if peer: - if not peer.connection: - peer.connect(self.connection) # Assign current connection to peer - if inner_path in site.content_manager.contents: - peer.last_content_json_update = site.content_manager.contents[inner_path]["modified"] - if config.verbose: - site.log.debug( - "Same version, adding new peer for locked files: %s, tasks: %s" % - (peer.key, len(site.worker_manager.tasks)) - ) + self.log.debug( + "Same version, adding new peer for locked files: %s, tasks: %s" % + (peer.key, len(site.worker_manager.tasks)) + ) for task in site.worker_manager.tasks: # New peer add to every ongoing task - if task["peers"] and not task["optional_hash_id"]: + if task["peers"]: # Download file from this peer too if its peer locked site.needFile(task["inner_path"], peer=peer, update=True, blocking=False) self.response({"ok": "File not changed"}) - self.connection.badAction() - else: # Invalid sign or sha hash - self.response({"error": "File %s invalid: %s" % (inner_path, error)}) - self.connection.badAction(5) - - def isReadable(self, site, inner_path, file, pos): - return True + else: # Invalid sign or sha1 hash + self.log.debug("Update for %s is invalid" % params["inner_path"]) + self.response({"error": "File invalid"}) # Send file content request - def handleGetFile(self, params, streaming=False): + def actionGetFile(self, params): site = self.sites.get(params["site"]) - if not site or not site.isServing(): # Site unknown or not serving + if not site or not site.settings["serving"]: # Site unknown or not serving self.response({"error": "Unknown site"}) - self.connection.badAction(5) return False try: file_path = site.storage.getPath(params["inner_path"]) - if streaming: - file_obj = 
site.storage.open(params["inner_path"]) - else: - file_obj = Msgpack.FilePart(file_path, "rb") - - with file_obj as file: + if config.debug_socket: + self.log.debug("Opening file: %s" % file_path) + with StreamingMsgpack.FilePart(file_path, "rb") as file: file.seek(params["location"]) - read_bytes = params.get("read_bytes", FILE_BUFF) + file.read_bytes = FILE_BUFF file_size = os.fstat(file.fileno()).st_size + assert params["location"] < file_size - if file_size > read_bytes: # Check if file is readable at current position (for big files) - if not self.isReadable(site, params["inner_path"], file, params["location"]): - raise RequestError("File not readable at position: %s" % params["location"]) - else: - if params.get("file_size") and params["file_size"] != file_size: - self.connection.badAction(2) - raise RequestError("File size does not match: %sB != %sB" % (params["file_size"], file_size)) + back = { + "body": file, + "size": file_size, + "location": min(file.tell() + FILE_BUFF, file_size) + } + if config.debug_socket: + self.log.debug( + "Sending file %s from position %s to %s" % + (file_path, params["location"], back["location"]) + ) + self.response(back, streaming=True) - if not streaming: - file.read_bytes = read_bytes - - if params["location"] > file_size: - self.connection.badAction(5) - raise RequestError("Bad file location") - - if streaming: - back = { - "size": file_size, - "location": min(file.tell() + read_bytes, file_size), - "stream_bytes": min(read_bytes, file_size - params["location"]) - } - self.response(back) - self.sendRawfile(file, read_bytes=read_bytes) - else: - back = { - "body": file, - "size": file_size, - "location": min(file.tell() + file.read_bytes, file_size) - } - self.response(back, streaming=True) - - bytes_sent = min(read_bytes, file_size - params["location"]) # Number of bytes we going to send + bytes_sent = min(FILE_BUFF, file_size - params["location"]) # Number of bytes we going to send site.settings["bytes_sent"] = 
site.settings.get("bytes_sent", 0) + bytes_sent if config.debug_socket: self.log.debug("File %s at position %s sent %s bytes" % (file_path, params["location"], bytes_sent)) # Add peer to site if not added before - connected_peer = site.addPeer(self.connection.ip, self.connection.port, source="request") + connected_peer = site.addPeer(self.connection.ip, self.connection.port) if connected_peer: # Just added connected_peer.connect(self.connection) # Assign current connection to peer - return {"bytes_sent": bytes_sent, "file_size": file_size, "location": params["location"]} - - except RequestError as err: - self.log.debug("GetFile %s %s %s request error: %s" % (self.connection, params["site"], params["inner_path"], Debug.formatException(err))) - self.response({"error": "File read error: %s" % err}) - except OSError as err: - if config.verbose: - self.log.debug("GetFile read error: %s" % Debug.formatException(err)) - self.response({"error": "File read error"}) - return False - except Exception as err: - self.log.error("GetFile exception: %s" % Debug.formatException(err)) - self.response({"error": "File read exception"}) + except Exception, err: + self.log.debug("GetFile read error: %s" % Debug.formatException(err)) + self.response({"error": "File read error: %s" % Debug.formatException(err)}) return False - def actionGetFile(self, params): - return self.handleGetFile(params) - + # New-style file streaming out of Msgpack context def actionStreamFile(self, params): - return self.handleGetFile(params, streaming=True) + site = self.sites.get(params["site"]) + if not site or not site.settings["serving"]: # Site unknown or not serving + self.response({"error": "Unknown site"}) + return False + try: + if config.debug_socket: + self.log.debug("Opening file: %s" % params["inner_path"]) + with site.storage.open(params["inner_path"]) as file: + file.seek(params["location"]) + file_size = os.fstat(file.fileno()).st_size + stream_bytes = min(FILE_BUFF, file_size - 
params["location"]) + assert stream_bytes >= 0 + + back = { + "size": file_size, + "location": min(file.tell() + FILE_BUFF, file_size), + "stream_bytes": stream_bytes + } + if config.debug_socket: + self.log.debug( + "Sending file %s from position %s to %s" % + (params["inner_path"], params["location"], back["location"]) + ) + self.response(back) + self.sendRawfile(file, read_bytes=FILE_BUFF) + + site.settings["bytes_sent"] = site.settings.get("bytes_sent", 0) + stream_bytes + if config.debug_socket: + self.log.debug("File %s at position %s sent %s bytes" % (params["inner_path"], params["location"], stream_bytes)) + + # Add peer to site if not added before + connected_peer = site.addPeer(self.connection.ip, self.connection.port) + if connected_peer: # Just added + connected_peer.connect(self.connection) # Assign current connection to peer + + except Exception, err: + self.log.debug("GetFile read error: %s" % Debug.formatException(err)) + self.response({"error": "File read error: %s" % Debug.formatException(err)}) + return False # Peer exchange request def actionPex(self, params): site = self.sites.get(params["site"]) - if not site or not site.isServing(): # Site unknown or not serving + if not site or not site.settings["serving"]: # Site unknown or not serving self.response({"error": "Unknown site"}) - self.connection.badAction(5) return False got_peer_keys = [] added = 0 # Add requester peer to site - connected_peer = site.addPeer(self.connection.ip, self.connection.port, source="request") - + connected_peer = site.addPeer(self.connection.ip, self.connection.port) if connected_peer: # It was not registered before added += 1 connected_peer.connect(self.connection) # Assign current connection to peer # Add sent peers to site - for packed_address in itertools.chain(params.get("peers", []), params.get("peers_ipv6", [])): + for packed_address in params["peers"]: address = helper.unpackAddress(packed_address) got_peer_keys.append("%s:%s" % address) - if 
site.addPeer(*address, source="pex"): - added += 1 - - # Add sent onion peers to site - for packed_address in params.get("peers_onion", []): - address = helper.unpackOnionAddress(packed_address) - got_peer_keys.append("%s:%s" % address) - if site.addPeer(*address, source="pex"): + if site.addPeer(*address): added += 1 # Send back peers that is not in the sent list and connectable (not port 0) - packed_peers = helper.packPeers(site.getConnectablePeers(params["need"], ignore=got_peer_keys, allow_private=False)) - + packed_peers = [peer.packMyAddress() for peer in site.getConnectablePeers(params["need"], got_peer_keys)] if added: site.worker_manager.onPeers() - if config.verbose: - self.log.debug( - "Added %s peers to %s using pex, sending back %s" % - (added, site, {key: len(val) for key, val in packed_peers.items()}) - ) - - back = { - "peers": packed_peers["ipv4"], - "peers_ipv6": packed_peers["ipv6"], - "peers_onion": packed_peers["onion"] - } - - self.response(back) + self.log.debug("Added %s peers to %s using pex, sending back %s" % (added, site, len(packed_peers))) + self.response({"peers": packed_peers}) # Get modified content.json files since def actionListModified(self, params): site = self.sites.get(params["site"]) - if not site or not site.isServing(): # Site unknown or not serving + if not site or not site.settings["serving"]: # Site unknown or not serving self.response({"error": "Unknown site"}) - self.connection.badAction(5) return False - modified_files = site.content_manager.listModified(params["since"]) + modified_files = { + inner_path: content["modified"] + for inner_path, content in site.content_manager.contents.iteritems() + if content["modified"] > params["since"] + } # Add peer to site if not added before - connected_peer = site.addPeer(self.connection.ip, self.connection.port, source="request") + connected_peer = site.addPeer(self.connection.ip, self.connection.port) if connected_peer: # Just added connected_peer.connect(self.connection) # 
Assign current connection to peer @@ -358,93 +270,82 @@ class FileRequest(object): def actionGetHashfield(self, params): site = self.sites.get(params["site"]) - if not site or not site.isServing(): # Site unknown or not serving + if not site or not site.settings["serving"]: # Site unknown or not serving self.response({"error": "Unknown site"}) - self.connection.badAction(5) return False # Add peer to site if not added before - peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, source="request") + peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True) if not peer.connection: # Just added peer.connect(self.connection) # Assign current connection to peer peer.time_my_hashfield_sent = time.time() # Don't send again if not changed - self.response({"hashfield_raw": site.content_manager.hashfield.tobytes()}) - - def findHashIds(self, site, hash_ids, limit=100): - back = collections.defaultdict(lambda: collections.defaultdict(list)) - found = site.worker_manager.findOptionalHashIds(hash_ids, limit=limit) - - for hash_id, peers in found.items(): - for peer in peers: - ip_type = helper.getIpType(peer.ip) - if len(back[ip_type][hash_id]) < 20: - back[ip_type][hash_id].append(peer.packMyAddress()) - return back + self.response({"hashfield_raw": site.content_manager.hashfield.tostring()}) def actionFindHashIds(self, params): site = self.sites.get(params["site"]) - s = time.time() - if not site or not site.isServing(): # Site unknown or not serving + if not site or not site.settings["serving"]: # Site unknown or not serving self.response({"error": "Unknown site"}) - self.connection.badAction(5) return False - event_key = "%s_findHashIds_%s_%s" % (self.connection.ip, params["site"], len(params["hash_ids"])) - if self.connection.cpu_time > 0.5 or not RateLimit.isAllowed(event_key, 60 * 5): - time.sleep(0.1) - back = self.findHashIds(site, params["hash_ids"], limit=10) + found = 
site.worker_manager.findOptionalHashIds(params["hash_ids"]) + + back = {} + for hash_id, peers in found.iteritems(): + back[hash_id] = [helper.packAddress(peer.ip, peer.port) for peer in peers] + # Check my hashfield + if config.ip_external: + my_ip = config.ip_external else: - back = self.findHashIds(site, params["hash_ids"]) - RateLimit.called(event_key) - - my_hashes = [] - my_hashfield_set = set(site.content_manager.hashfield) + my_ip = self.server.ip for hash_id in params["hash_ids"]: - if hash_id in my_hashfield_set: - my_hashes.append(hash_id) - - if config.verbose: - self.log.debug( - "Found: %s for %s hashids in %.3fs" % - ({key: len(val) for key, val in back.items()}, len(params["hash_ids"]), time.time() - s) - ) - self.response({"peers": back["ipv4"], "peers_onion": back["onion"], "peers_ipv6": back["ipv6"], "my": my_hashes}) + if hash_id in site.content_manager.hashfield: + if hash_id not in back: + back[hash_id] = [] + back[hash_id].append(helper.packAddress(my_ip, self.server.port)) # Add myself + self.log.debug( + "Found: %s/%s" % + (len(back), len(params["hash_ids"])) + ) + self.response({"peers": back}) def actionSetHashfield(self, params): site = self.sites.get(params["site"]) - if not site or not site.isServing(): # Site unknown or not serving + if not site or not site.settings["serving"]: # Site unknown or not serving self.response({"error": "Unknown site"}) - self.connection.badAction(5) return False - # Add or get peer - peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, connection=self.connection, source="request") + peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True) # Add or get peer if not peer.connection: peer.connect(self.connection) - peer.hashfield.replaceFromBytes(params["hashfield_raw"]) + peer.hashfield.replaceFromString(params["hashfield_raw"]) self.response({"ok": "Updated"}) + def actionSiteReload(self, params): + if self.connection.ip != "127.0.0.1" and self.connection.ip 
!= config.ip_external: + self.response({"error": "Only local host allowed"}) + + site = self.sites.get(params["site"]) + site.content_manager.loadContent(params["inner_path"], add_bad_files=False) + site.storage.verifyFiles(quick_check=True) + site.updateWebsocket() + + self.response({"ok": "Reloaded"}) + + def actionSitePublish(self, params): + if self.connection.ip != "127.0.0.1" and self.connection.ip != config.ip_external: + self.response({"error": "Only local host allowed"}) + + site = self.sites.get(params["site"]) + num = site.publish(inner_path=params.get("inner_path", "content.json")) + + self.response({"ok": "Successfuly published to %s peers" % num}) + # Send a simple Pong! answer - def actionPing(self, params): - self.response(b"Pong!") - - # Check requested port of the other peer - def actionCheckport(self, params): - if helper.getIpType(self.connection.ip) == "ipv6": - sock_address = (self.connection.ip, params["port"], 0, 0) - else: - sock_address = (self.connection.ip, params["port"]) - - with closing(helper.createSocket(self.connection.ip)) as sock: - sock.settimeout(5) - if sock.connect_ex(sock_address) == 0: - self.response({"status": "open", "ip_external": self.connection.ip}) - else: - self.response({"status": "closed", "ip_external": self.connection.ip}) + def actionPing(self): + self.response("Pong!") # Unknown command def actionUnknown(self, cmd, params): self.response({"error": "Unknown command: %s" % cmd}) - self.connection.badAction(5) diff --git a/src/File/FileServer.py b/src/File/FileServer.py index b7a942fc..27e681a1 100644 --- a/src/File/FileServer.py +++ b/src/File/FileServer.py @@ -1,155 +1,40 @@ import logging +import urllib2 +import re import time -import random -import socket -import sys import gevent -import gevent.pool -from gevent.server import StreamServer -import util -from util import helper from Config import config -from .FileRequest import FileRequest -from Peer import PeerPortchecker +from FileRequest import FileRequest 
from Site import SiteManager -from Connection import ConnectionServer -from Plugin import PluginManager from Debug import Debug +from Connection import ConnectionServer +from util import UpnpPunch -@PluginManager.acceptPlugins class FileServer(ConnectionServer): - def __init__(self, ip=config.fileserver_ip, port=config.fileserver_port, ip_type=config.fileserver_ip_type): - self.site_manager = SiteManager.site_manager - self.portchecker = PeerPortchecker.PeerPortchecker(self) - self.log = logging.getLogger("FileServer") - self.ip_type = ip_type - self.ip_external_list = [] - - self.supported_ip_types = ["ipv4"] # Outgoing ip_type support - if helper.getIpType(ip) == "ipv6" or self.isIpv6Supported(): - self.supported_ip_types.append("ipv6") - - if ip_type == "ipv6" or (ip_type == "dual" and "ipv6" in self.supported_ip_types): - ip = ip.replace("*", "::") - else: - ip = ip.replace("*", "0.0.0.0") - - if config.tor == "always": - port = config.tor_hs_port - config.fileserver_port = port - elif port == 0: # Use random port - port_range_from, port_range_to = list(map(int, config.fileserver_port_range.split("-"))) - port = self.getRandomPort(ip, port_range_from, port_range_to) - config.fileserver_port = port - if not port: - raise Exception("Can't find bindable port") - if not config.tor == "always": - config.saveValue("fileserver_port", port) # Save random port value for next restart - config.arguments.fileserver_port = port - + def __init__(self, ip=config.fileserver_ip, port=config.fileserver_port): ConnectionServer.__init__(self, ip, port, self.handleRequest) - self.log.debug("Supported IP types: %s" % self.supported_ip_types) - - if ip_type == "dual" and ip == "::": - # Also bind to ipv4 addres in dual mode - try: - self.log.debug("Binding proxy to %s:%s" % ("::", self.port)) - self.stream_server_proxy = StreamServer( - ("0.0.0.0", self.port), self.handleIncomingConnection, spawn=self.pool, backlog=100 - ) - except Exception as err: - self.log.info("StreamServer 
proxy create error: %s" % Debug.formatException(err)) - - self.port_opened = {} - - self.sites = self.site_manager.sites - self.last_request = time.time() - self.files_parsing = {} - self.ui_server = None - - def getRandomPort(self, ip, port_range_from, port_range_to): - """Generates Random Port from given range - Args: - ip: IP Address - port_range_from: From Range - port_range_to: to Range - """ - self.log.info("Getting random port in range %s-%s..." % (port_range_from, port_range_to)) - tried = [] - for bind_retry in range(100): - port = random.randint(port_range_from, port_range_to) - if port in tried: - continue - tried.append(port) - sock = helper.createSocket(ip) - try: - sock.bind((ip, port)) - success = True - except Exception as err: - self.log.warning("Error binding to port %s: %s" % (port, err)) - success = False - sock.close() - if success: - self.log.info("Found unused random port: %s" % port) - return port - else: - time.sleep(0.1) - return False - - def isIpv6Supported(self): - if config.tor == "always": - return True - # Test if we can connect to ipv6 address - ipv6_testip = "fcec:ae97:8902:d810:6c92:ec67:efb2:3ec5" - try: - sock = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM) - sock.connect((ipv6_testip, 80)) - local_ipv6 = sock.getsockname()[0] - if local_ipv6 == "::1": - self.log.debug("IPv6 not supported, no local IPv6 address") - return False - else: - self.log.debug("IPv6 supported on IP %s" % local_ipv6) - return True - except socket.error as err: - self.log.warning("IPv6 not supported: %s" % err) - return False - except Exception as err: - self.log.error("IPv6 check error: %s" % err) - return False - - def listenProxy(self): - try: - self.stream_server_proxy.serve_forever() - except Exception as err: - if err.errno == 98: # Address already in use error - self.log.debug("StreamServer proxy listen error: %s" % err) - else: - self.log.info("StreamServer proxy listen error: %s" % err) + if config.ip_external: # Ip external definied in 
arguments + self.port_opened = True + SiteManager.peer_blacklist.append((config.ip_external, self.port)) # Add myself to peer blacklist + else: + self.port_opened = None # Is file server opened on router + self.sites = {} # Handle request to fileserver def handleRequest(self, connection, message): - if config.verbose: - if "params" in message: - self.log.debug( - "FileRequest: %s %s %s %s" % - (str(connection), message["cmd"], message["params"].get("site"), message["params"].get("inner_path")) - ) - else: - self.log.debug("FileRequest: %s %s" % (str(connection), message["cmd"])) + if "params" in message: + self.log.debug( + "FileRequest: %s %s %s %s" % + (str(connection), message["cmd"], message["params"].get("site"), message["params"].get("inner_path")) + ) + else: + self.log.debug("FileRequest: %s %s" % (str(connection), message["cmd"])) req = FileRequest(self, connection) req.route(message["cmd"], message.get("req_id"), message.get("params")) - if not self.has_internet and not connection.is_private_ip: - self.has_internet = True - self.onInternetOnline() - - def onInternetOnline(self): - self.log.info("Internet online") - gevent.spawn(self.checkSites, check_files=False, force_port_check=True) # Reload the FileRequest class to prevent restarts in debug mode def reload(self): @@ -157,253 +42,210 @@ class FileServer(ConnectionServer): import imp FileRequest = imp.load_source("FileRequest", "src/File/FileRequest.py").FileRequest - def portCheck(self): - if config.offline: - self.log.info("Offline mode: port check disabled") - res = {"ipv4": None, "ipv6": None} - self.port_opened = res - return res + # Try to open the port using upnp + def openport(self, port=None, check=True): + if not port: + port = self.port + if self.port_opened: + return True # Port already opened + if check: # Check first if its already opened + if self.testOpenport(port)["result"] is True: + return True # Port already opened - if config.ip_external: - for ip_external in config.ip_external: - 
SiteManager.peer_blacklist.append((ip_external, self.port)) # Add myself to peer blacklist + self.log.info("Trying to open port using UpnpPunch...") + try: + upnp_punch = UpnpPunch.open_port(self.port, 'ZeroNet') + upnp_punch = True + except Exception, err: + self.log.error("UpnpPunch run error: %s" % Debug.formatException(err)) + upnp_punch = False - ip_external_types = set([helper.getIpType(ip) for ip in config.ip_external]) - res = { - "ipv4": "ipv4" in ip_external_types, - "ipv6": "ipv6" in ip_external_types - } - self.ip_external_list = config.ip_external - self.port_opened.update(res) - self.log.info("Server port opened based on configuration ipv4: %s, ipv6: %s" % (res["ipv4"], res["ipv6"])) - return res + if upnp_punch and self.testOpenport(port)["result"] is True: + return True - self.port_opened = {} - if self.ui_server: - self.ui_server.updateWebsocket() + self.log.info("Upnp mapping failed :( Please forward port %s on your router to your ipaddress" % port) + return False - if "ipv6" in self.supported_ip_types: - res_ipv6_thread = gevent.spawn(self.portchecker.portCheck, self.port, "ipv6") + # Test if the port is open + def testOpenport(self, port=None): + time.sleep(1) # Wait for port open + if not port: + port = self.port + back = self.testOpenportPortchecker(port) + if back["result"] is True: # Successful port check + return back + else: # Alternative port checker + return self.testOpenportCanyouseeme(port) + + def testOpenportPortchecker(self, port=None): + self.log.info("Checking port %s using portchecker.co..." % port) + try: + data = urllib2.urlopen("http://portchecker.co/check", "port=%s" % port, timeout=20.0).read() + message = re.match('.*
    (.*?)
    ', data, re.DOTALL).group(1) + message = re.sub("<.*?>", "", message.replace("
    ", " ").replace(" ", " ").strip()) # Strip http tags + except Exception, err: + message = "Error: %s" % Debug.formatException(err) + data = "" + + if "closed" in message or "Error" in message: + self.log.info("[BAD :(] Port closed: %s" % message) + if port == self.port: + self.port_opened = False # Self port, update port_opened status + match = re.match(".*targetIP.*?value=\"(.*?)\"", data, re.DOTALL) # Try find my external ip in message + if match: # Found my ip in message + config.ip_external = match.group(1) + SiteManager.peer_blacklist.append((config.ip_external, self.port)) # Add myself to peer blacklist + else: + config.ip_external = False + return {"result": False, "message": message} else: - res_ipv6_thread = None + self.log.info("[OK :)] Port open: %s" % message) + if port == self.port: # Self port, update port_opened status + self.port_opened = True + match = re.match(".*targetIP.*?value=\"(.*?)\"", data, re.DOTALL) # Try find my external ip in message + if match: # Found my ip in message + config.ip_external = match.group(1) + SiteManager.peer_blacklist.append((config.ip_external, self.port)) # Add myself to peer blacklist + else: + config.ip_external = False + return {"result": True, "message": message} - res_ipv4 = self.portchecker.portCheck(self.port, "ipv4") - if not res_ipv4["opened"] and config.tor != "always": - if self.portchecker.portOpen(self.port): - res_ipv4 = self.portchecker.portCheck(self.port, "ipv4") + def testOpenportCanyouseeme(self, port=None): + self.log.info("Checking port %s using canyouseeme.org..." % port) + try: + data = urllib2.urlopen("http://www.canyouseeme.org/", "port=%s" % port, timeout=20.0).read() + message = re.match('.*

    (.*?)

    ', data, re.DOTALL).group(1) + message = re.sub("<.*?>", "", message.replace("
    ", " ").replace(" ", " ")) # Strip http tags + except Exception, err: + message = "Error: %s" % Debug.formatException(err) - if res_ipv6_thread is None: - res_ipv6 = {"ip": None, "opened": None} + if "Error" in message: + self.log.info("[BAD :(] Port closed: %s" % message) + if port == self.port: + self.port_opened = False # Self port, update port_opened status + match = re.match(".*?([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)", message) # Try find my external ip in message + if match: # Found my ip in message + config.ip_external = match.group(1) + SiteManager.peer_blacklist.append((config.ip_external, self.port)) # Add myself to peer blacklist + else: + config.ip_external = False + return {"result": False, "message": message} else: - res_ipv6 = res_ipv6_thread.get() - if res_ipv6["opened"] and not helper.getIpType(res_ipv6["ip"]) == "ipv6": - self.log.info("Invalid IPv6 address from port check: %s" % res_ipv6["ip"]) - res_ipv6["opened"] = False + self.log.info("[OK :)] Port open: %s" % message) + if port == self.port: # Self port, update port_opened status + self.port_opened = True + match = re.match(".*?([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)", message) # Try find my external ip in message + if match: # Found my ip in message + config.ip_external = match.group(1) + SiteManager.peer_blacklist.append((config.ip_external, self.port)) # Add myself to peer blacklist + else: + config.ip_external = False + return {"result": True, "message": message} - self.ip_external_list = [] - for res_ip in [res_ipv4, res_ipv6]: - if res_ip["ip"] and res_ip["ip"] not in self.ip_external_list: - self.ip_external_list.append(res_ip["ip"]) - SiteManager.peer_blacklist.append((res_ip["ip"], self.port)) - - self.log.info("Server port opened ipv4: %s, ipv6: %s" % (res_ipv4["opened"], res_ipv6["opened"])) - - res = {"ipv4": res_ipv4["opened"], "ipv6": res_ipv6["opened"]} - - # Add external IPs from local interfaces - interface_ips = helper.getInterfaceIps("ipv4") - if "ipv6" in self.supported_ip_types: 
- interface_ips += helper.getInterfaceIps("ipv6") - for ip in interface_ips: - if not helper.isPrivateIp(ip) and ip not in self.ip_external_list: - self.ip_external_list.append(ip) - res[helper.getIpType(ip)] = True # We have opened port if we have external ip - SiteManager.peer_blacklist.append((ip, self.port)) - self.log.debug("External ip found on interfaces: %s" % ip) - - self.port_opened.update(res) - - if self.ui_server: - self.ui_server.updateWebsocket() - - return res + # Set external ip without testing + def setIpExternal(self, ip_external): + logging.info("Setting external ip without testing: %s..." % ip_external) + config.ip_external = ip_external + self.port_opened = True # Check site file integrity - def checkSite(self, site, check_files=False): - if site.isServing(): - site.announce(mode="startup") # Announce site to tracker - site.update(check_files=check_files) # Update site's content.json and download changed files - site.sendMyHashfield() - site.updateHashfield() + def checkSite(self, site): + if site.settings["serving"]: + site.announce() # Announce site to tracker + site.update() # Update site's content.json and download changed files + if self.port_opened is False: # In passive mode keep 5 active peer connection to get the updates + site.needConnections() # Check sites integrity - @util.Noparallel() - def checkSites(self, check_files=False, force_port_check=False): - self.log.debug("Checking sites...") - s = time.time() - sites_checking = False - if not self.port_opened or force_port_check: # Test and open port if not tested yet - if len(self.sites) <= 2: # Don't wait port opening on first startup - sites_checking = True - for address, site in list(self.sites.items()): - gevent.spawn(self.checkSite, site, check_files) + def checkSites(self): + if self.port_opened is None: # Test and open port if not tested yet + self.openport() - self.portCheck() - - if not self.port_opened["ipv4"]: - self.tor_manager.startOnions() - - if not sites_checking: - 
check_pool = gevent.pool.Pool(5) - # Check sites integrity - for site in sorted(list(self.sites.values()), key=lambda site: site.settings.get("modified", 0), reverse=True): - if not site.isServing(): - continue - check_thread = check_pool.spawn(self.checkSite, site, check_files) # Check in new thread - time.sleep(2) - if site.settings.get("modified", 0) < time.time() - 60 * 60 * 24: # Not so active site, wait some sec to finish - check_thread.join(timeout=5) - self.log.debug("Checksites done in %.3fs" % (time.time() - s)) - - def cleanupSites(self): - import gc - startup = True - time.sleep(5 * 60) # Sites already cleaned up on startup - peers_protected = set([]) - while 1: - # Sites health care every 20 min - self.log.debug( - "Running site cleanup, connections: %s, internet: %s, protected peers: %s" % - (len(self.connections), self.has_internet, len(peers_protected)) - ) - - for address, site in list(self.sites.items()): - if not site.isServing(): - continue - - if not startup: - site.cleanupPeers(peers_protected) - - time.sleep(1) # Prevent too quick request - - peers_protected = set([]) - for address, site in list(self.sites.items()): - if not site.isServing(): - continue - - if site.peers: - with gevent.Timeout(10, exception=False): - site.announcer.announcePex() - - # Last check modification failed - if site.content_updated is False: - site.update() - elif site.bad_files: - site.retryBadFiles() - - if time.time() - site.settings.get("modified", 0) < 60 * 60 * 24 * 7: - # Keep active connections if site has been modified witin 7 days - connected_num = site.needConnections(check_site_on_reconnect=True) - - if connected_num < config.connected_limit: # This site has small amount of peers, protect them from closing - peers_protected.update([peer.key for peer in site.getConnectedPeers()]) - - time.sleep(1) # Prevent too quick request - - site = None - gc.collect() # Implicit garbage collection - startup = False - time.sleep(60 * 20) - - def announceSite(self, 
site): - site.announce(mode="update", pex=False) - active_site = time.time() - site.settings.get("modified", 0) < 24 * 60 * 60 - if site.settings["own"] or active_site: - # Check connections more frequently on own and active sites to speed-up first connections - site.needConnections(check_site_on_reconnect=True) - site.sendMyHashfield(3) - site.updateHashfield(3) + self.log.debug("Checking sites integrity..") + for address, site in self.sites.items(): # Check sites integrity + gevent.spawn(self.checkSite, site) # Check in new thread + time.sleep(2) # Prevent too quick request + site = None # Announce sites every 20 min def announceSites(self): - time.sleep(5 * 60) # Sites already announced on startup + import gc + first_announce = True # First start while 1: - config.loadTrackersFile() - s = time.time() - for address, site in list(self.sites.items()): - if not site.isServing(): - continue - gevent.spawn(self.announceSite, site).join(timeout=10) - time.sleep(1) - taken = time.time() - s + # Sites healthcare every 20 min + if config.trackers_file: + config.loadTrackersFile() + for address, site in self.sites.items(): + if site.settings["serving"]: + if first_announce: # Announce to all trackers on startup + site.announce() + else: # If not first run only use PEX + site.announcePex() - # Query all trackers one-by-one in 20 minutes evenly distributed - sleep = max(0, 60 * 20 / len(config.trackers) - taken) + # Retry failed files + if site.bad_files: + site.retryBadFiles() - self.log.debug("Site announce tracker done in %.3fs, sleeping for %.3fs..." 
% (taken, sleep)) - time.sleep(sleep) + site.cleanupPeers() + + # In passive mode keep 5 active peer connection to get the updates + if self.port_opened is False: + site.needConnections() + + if first_announce: # Send my optional files to peers + site.sendMyHashfield() + site.updateHashfield() + + time.sleep(2) # Prevent too quick request + + site = None + gc.collect() # Implicit grabage collection + + # Find new peers + for tracker_i in range(len(config.trackers)): + time.sleep(60 * 20 / len(config.trackers)) # Query all trackers one-by-one in 20 minutes evenly distributed + if config.trackers_file: + config.loadTrackersFile() + for address, site in self.sites.items(): + site.announce(num=1, pex=False) + site.sendMyHashfield(3) + site.updateHashfield(1) + time.sleep(2) + + first_announce = False # Detects if computer back from wakeup def wakeupWatcher(self): last_time = time.time() - last_my_ips = socket.gethostbyname_ex('')[2] while 1: time.sleep(30) - is_time_changed = time.time() - max(self.last_request, last_time) > 60 * 3 - if is_time_changed: - # If taken more than 3 minute then the computer was in sleep mode + if time.time() - last_time > 60: # If taken more than 60 second then the computer was in sleep mode self.log.info( - "Wakeup detected: time warp from %0.f to %0.f (%0.f sleep seconds), acting like startup..." % + "Wakeup detected: time wrap from %s to %s (%s sleep seconds), acting like startup..." 
% (last_time, time.time(), time.time() - last_time) ) - - my_ips = socket.gethostbyname_ex('')[2] - is_ip_changed = my_ips != last_my_ips - if is_ip_changed: - self.log.info("IP change detected from %s to %s" % (last_my_ips, my_ips)) - - if is_time_changed or is_ip_changed: - self.checkSites(check_files=False, force_port_check=True) - + self.port_opened = None # Check if we still has the open port on router + self.checkSites() last_time = time.time() - last_my_ips = my_ips # Bind and start serving sites def start(self, check_sites=True): - if self.stopping: - return False + self.sites = SiteManager.site_manager.list() + self.log = logging.getLogger("FileServer") - ConnectionServer.start(self) - - try: - self.stream_server.start() - except Exception as err: - self.log.error("Error listening on: %s:%s: %s" % (self.ip, self.port, err)) - - self.sites = self.site_manager.list() if config.debug: # Auto reload FileRequest on change from Debug import DebugReloader - DebugReloader.watcher.addCallback(self.reload) + DebugReloader(self.reload) if check_sites: # Open port, Update sites, Check files integrity gevent.spawn(self.checkSites) thread_announce_sites = gevent.spawn(self.announceSites) - thread_cleanup_sites = gevent.spawn(self.cleanupSites) thread_wakeup_watcher = gevent.spawn(self.wakeupWatcher) - ConnectionServer.listen(self) + ConnectionServer.start(self) + # thread_wakeup_watcher.kill(exception=Debug.Notify("Stopping FileServer")) + # thread_announce_sites.kill(exception=Debug.Notify("Stopping FileServer")) self.log.debug("Stopped.") - - def stop(self): - if self.running and self.portchecker.upnp_port_opened: - self.log.debug('Closing port %d' % self.port) - try: - self.portchecker.portClose(self.port) - self.log.info('Closed port via upnp.') - except Exception as err: - self.log.info("Failed at attempt to use upnp to close port: %s" % err) - - return ConnectionServer.stop(self) diff --git a/src/File/__init__.py b/src/File/__init__.py index 1eb602d6..20b28a97 
100644 --- a/src/File/__init__.py +++ b/src/File/__init__.py @@ -1,2 +1,2 @@ -from .FileServer import FileServer -from .FileRequest import FileRequest \ No newline at end of file +from FileServer import FileServer +from FileRequest import FileRequest \ No newline at end of file diff --git a/src/Peer/Peer.py b/src/Peer/Peer.py index 03cc1f47..a543d581 100644 --- a/src/Peer/Peer.py +++ b/src/Peer/Peer.py @@ -1,66 +1,47 @@ import logging import time import sys -import itertools -import collections import gevent -import io +from cStringIO import StringIO from Debug import Debug from Config import config from util import helper -from .PeerHashfield import PeerHashfield -from Plugin import PluginManager +from PeerHashfield import PeerHashfield if config.use_tempfiles: import tempfile # Communicate remote peers -@PluginManager.acceptPlugins class Peer(object): __slots__ = ( - "ip", "port", "site", "key", "connection", "connection_server", "time_found", "time_response", "time_hashfield", - "time_added", "has_hashfield", "is_tracker_connection", "time_my_hashfield_sent", "last_ping", "reputation", - "last_content_json_update", "hashfield", "connection_error", "hash_failed", "download_bytes", "download_time" + "ip", "port", "site", "key", "connection", "time_found", "time_response", "time_hashfield", "time_added", + "time_my_hashfield_sent", "last_ping", "hashfield", "connection_error", "hash_failed", "download_bytes", "download_time" ) - def __init__(self, ip, port, site=None, connection_server=None): + def __init__(self, ip, port, site=None): self.ip = ip self.port = port self.site = site self.key = "%s:%s" % (ip, port) self.connection = None - self.connection_server = connection_server - self.has_hashfield = False # Lazy hashfield object not created yet + self.hashfield = PeerHashfield() # Got optional files hash_id self.time_hashfield = None # Last time peer's hashfiled downloaded self.time_my_hashfield_sent = None # Last time my hashfield sent to peer self.time_found = 
time.time() # Time of last found in the torrent tracker self.time_response = None # Time of last successful response from peer self.time_added = time.time() self.last_ping = None # Last response time for ping - self.is_tracker_connection = False # Tracker connection instead of normal peer - self.reputation = 0 # More likely to connect if larger - self.last_content_json_update = 0.0 # Modify date of last received content.json self.connection_error = 0 # Series of connection error self.hash_failed = 0 # Number of bad files from peer self.download_bytes = 0 # Bytes downloaded self.download_time = 0 # Time spent to download - def __getattr__(self, key): - if key == "hashfield": - self.has_hashfield = True - self.hashfield = PeerHashfield() - return self.hashfield - else: - return getattr(self, key) - def log(self, text): - if not config.verbose: - return # Only log if we are in debug mode if self.site: self.site.log.debug("%s:%s %s" % (self.ip, self.port, text)) else: @@ -68,80 +49,48 @@ class Peer(object): # Connect to host def connect(self, connection=None): - if self.reputation < -10: - self.reputation = -10 - if self.reputation > 10: - self.reputation = 10 - if self.connection: self.log("Getting connection (Closing %s)..." % self.connection) - self.connection.close("Connection change") + self.connection.close() else: - self.log("Getting connection (reputation: %s)..." 
% self.reputation) + self.log("Getting connection...") if connection: # Connection specified - self.log("Assigning connection %s" % connection) self.connection = connection - self.connection.sites += 1 else: # Try to find from connection pool or create new connection self.connection = None try: - if self.connection_server: - connection_server = self.connection_server - elif self.site: - connection_server = self.site.connection_server + if self.site: + self.connection = self.site.connection_server.getConnection(self.ip, self.port) else: - import main - connection_server = main.file_server - self.connection = connection_server.getConnection(self.ip, self.port, site=self.site, is_tracker_connection=self.is_tracker_connection) - self.reputation += 1 - self.connection.sites += 1 - except Exception as err: - self.onConnectionError("Getting connection error") + self.connection = sys.modules["main"].file_server.getConnection(self.ip, self.port) + + except Exception, err: + self.onConnectionError() self.log("Getting connection error: %s (connection_error: %s, hash_failed: %s)" % (Debug.formatException(err), self.connection_error, self.hash_failed)) self.connection = None - return self.connection # Check if we have connection to peer def findConnection(self): if self.connection and self.connection.connected: # We have connection to peer return self.connection else: # Try to find from other sites connections - self.connection = self.site.connection_server.getConnection(self.ip, self.port, create=False, site=self.site) - if self.connection: - self.connection.sites += 1 + self.connection = self.site.connection_server.getConnection(self.ip, self.port, create=False) return self.connection def __str__(self): - if self.site: - return "Peer:%-12s of %s" % (self.ip, self.site.address_short) - else: - return "Peer:%-12s" % self.ip + return "Peer:%-12s" % self.ip def __repr__(self): return "<%s>" % self.__str__() def packMyAddress(self): - if self.ip.endswith(".onion"): - return 
helper.packOnionAddress(self.ip, self.port) - else: - return helper.packAddress(self.ip, self.port) + return helper.packAddress(self.ip, self.port) - # Found a peer from a source - def found(self, source="other"): - if self.reputation < 5: - if source == "tracker": - if self.ip.endswith(".onion"): - self.reputation += 1 - else: - self.reputation += 2 - elif source == "local": - self.reputation += 20 - - if source in ("tracker", "local"): - self.site.peers_recent.appendleft(self) + # Found a peer on tracker + def found(self): self.time_found = time.time() # Send a command to peer and return response value @@ -149,35 +98,27 @@ class Peer(object): if not self.connection or self.connection.closed: self.connect() if not self.connection: - self.onConnectionError("Reconnect error") + self.onConnectionError() return None # Connection failed - self.log("Send request: %s %s %s %s" % (params.get("site", ""), cmd, params.get("inner_path", ""), params.get("location", ""))) - for retry in range(1, 4): # Retry 3 times try: - if not self.connection: - raise Exception("No connection found") res = self.connection.request(cmd, params, stream_to) if not res: raise Exception("Send error") if "error" in res: self.log("%s error: %s" % (cmd, res["error"])) - self.onConnectionError("Response error") - break + self.onConnectionError() else: # Successful request, reset connection error num self.connection_error = 0 self.time_response = time.time() - if res: - return res - else: - raise Exception("Invalid response: %s" % res) - except Exception as err: + return res + except Exception, err: if type(err).__name__ == "Notify": # Greenlet killed by worker self.log("Peer worker got killed: %s, aborting cmd: %s" % (err.message, cmd)) break else: - self.onConnectionError("Request error") + self.onConnectionError() self.log( "%s (connection_error: %s, hash_failed: %s, retry: %s)" % (Debug.formatException(err), self.connection_error, self.hash_failed, retry) @@ -187,56 +128,61 @@ class Peer(object): 
return None # Failed after 4 retry # Get a file content from peer - def getFile(self, site, inner_path, file_size=None, pos_from=0, pos_to=None, streaming=False): - if file_size and file_size > 5 * 1024 * 1024: - max_read_size = 1024 * 1024 - else: - max_read_size = 512 * 1024 - - if pos_to: - read_bytes = min(max_read_size, pos_to - pos_from) - else: - read_bytes = max_read_size - - location = pos_from + def getFile(self, site, inner_path): + # Use streamFile if client supports it + if config.stream_downloads and self.connection and self.connection.handshake and self.connection.handshake["rev"] > 310: + return self.streamFile(site, inner_path) + location = 0 if config.use_tempfiles: buff = tempfile.SpooledTemporaryFile(max_size=16 * 1024, mode='w+b') else: - buff = io.BytesIO() + buff = StringIO() s = time.time() - while True: # Read in smaller parts - if config.stream_downloads or read_bytes > 256 * 1024 or streaming: - res = self.request("streamFile", {"site": site, "inner_path": inner_path, "location": location, "read_bytes": read_bytes, "file_size": file_size}, stream_to=buff) - if not res or "location" not in res: # Error - return False - else: - self.log("Send: %s" % inner_path) - res = self.request("getFile", {"site": site, "inner_path": inner_path, "location": location, "read_bytes": read_bytes, "file_size": file_size}) - if not res or "location" not in res: # Error - return False - self.log("Recv: %s" % inner_path) - buff.write(res["body"]) - res["body"] = None # Save memory + while True: # Read in 512k parts + res = self.request("getFile", {"site": site, "inner_path": inner_path, "location": location}) - if res["location"] == res["size"] or res["location"] == pos_to: # End of file + if not res or "body" not in res: # Error + return False + + buff.write(res["body"]) + res["body"] = None # Save memory + if res["location"] == res["size"]: # End of file break else: location = res["location"] - if pos_to: - read_bytes = min(max_read_size, pos_to - location) - 
if pos_to: - recv = pos_to - pos_from - else: - recv = res["location"] - - self.download_bytes += recv + self.download_bytes += res["location"] self.download_time += (time.time() - s) - if self.site: - self.site.settings["bytes_recv"] = self.site.settings.get("bytes_recv", 0) + recv - self.log("Downloaded: %s, pos: %s, read_bytes: %s" % (inner_path, buff.tell(), read_bytes)) + self.site.settings["bytes_recv"] = self.site.settings.get("bytes_recv", 0) + res["location"] + buff.seek(0) + return buff + + # Download file out of msgpack context to save memory and cpu + def streamFile(self, site, inner_path): + location = 0 + if config.use_tempfiles: + buff = tempfile.SpooledTemporaryFile(max_size=16 * 1024, mode='w+b') + else: + buff = StringIO() + + s = time.time() + while True: # Read in 512k parts + res = self.request("streamFile", {"site": site, "inner_path": inner_path, "location": location}, stream_to=buff) + + if not res: # Error + self.log("Invalid response: %s" % res) + return False + + if res["location"] == res["size"]: # End of file + break + else: + location = res["location"] + + self.download_bytes += res["location"] + self.download_time += (time.time() - s) + self.site.settings["bytes_recv"] = self.site.settings.get("bytes_recv", 0) + res["location"] buff.seek(0) return buff @@ -248,11 +194,11 @@ class Peer(object): with gevent.Timeout(10.0, False): # 10 sec timeout, don't raise exception res = self.request("ping") - if res and "body" in res and res["body"] == b"Pong!": + if res and "body" in res and res["body"] == "Pong!": response_time = time.time() - s break # All fine, exit from for loop # Timeout reached or bad response - self.onConnectionError("Ping timeout") + self.onConnectionError() self.connect() time.sleep(1) @@ -267,41 +213,18 @@ class Peer(object): def pex(self, site=None, need_num=5): if not site: site = self.site # If no site defined request peers for this site - - # give back 5 connectible peers - packed_peers = 
helper.packPeers(self.site.getConnectablePeers(5, allow_private=False)) - request = {"site": site.address, "peers": packed_peers["ipv4"], "need": need_num} - if packed_peers["onion"]: - request["peers_onion"] = packed_peers["onion"] - if packed_peers["ipv6"]: - request["peers_ipv6"] = packed_peers["ipv6"] - res = self.request("pex", request) + # give him/her 5 connectible peers + packed_peers = [peer.packMyAddress() for peer in self.site.getConnectablePeers(5)] + res = self.request("pex", {"site": site.address, "peers": packed_peers, "need": need_num}) if not res or "error" in res: return False added = 0 - - # Remove unsupported peer types - if "peers_ipv6" in res and self.connection and "ipv6" not in self.connection.server.supported_ip_types: - del res["peers_ipv6"] - - if "peers_onion" in res and self.connection and "onion" not in self.connection.server.supported_ip_types: - del res["peers_onion"] - - # Add IPv4 + IPv6 - for peer in itertools.chain(res.get("peers", []), res.get("peers_ipv6", [])): + for peer in res.get("peers", []): address = helper.unpackAddress(peer) - if site.addPeer(*address, source="pex"): + if site.addPeer(*address): added += 1 - - # Add Onion - for peer in res.get("peers_onion", []): - address = helper.unpackOnionAddress(peer) - if site.addPeer(*address, source="pex"): - added += 1 - if added: self.log("Added peers using pex: %s" % added) - return added # List modified files since the date @@ -310,15 +233,15 @@ class Peer(object): return self.request("listModified", {"since": since, "site": self.site.address}) def updateHashfield(self, force=False): - # Don't update hashfield again in 5 min - if self.time_hashfield and time.time() - self.time_hashfield < 5 * 60 and not force: + # Don't update hashfield again in 15 min + if self.time_hashfield and time.time() - self.time_hashfield > 60 * 15 and not force: return False self.time_hashfield = time.time() res = self.request("getHashfield", {"site": self.site.address}) - if not res or "error" in 
res or "hashfield_raw" not in res: + if not res or "error" in res: return False - self.hashfield.replaceFromBytes(res["hashfield_raw"]) + self.hashfield.replaceFromString(res["hashfield_raw"]) return self.hashfield @@ -326,31 +249,9 @@ class Peer(object): # Return: {hash1: ["ip:port", "ip:port",...],...} def findHashIds(self, hash_ids): res = self.request("findHashIds", {"site": self.site.address, "hash_ids": hash_ids}) - if not res or "error" in res or type(res) is not dict: + if not res or "error" in res: return False - - back = collections.defaultdict(list) - - for ip_type in ["ipv4", "ipv6", "onion"]: - if ip_type == "ipv4": - key = "peers" - else: - key = "peers_%s" % ip_type - for hash, peers in list(res.get(key, {}).items())[0:30]: - if ip_type == "onion": - unpacker_func = helper.unpackOnionAddress - else: - unpacker_func = helper.unpackAddress - - back[hash] += list(map(unpacker_func, peers)) - - for hash in res.get("my", []): - if self.connection: - back[hash].append((self.connection.ip, self.connection.port)) - else: - back[hash].append((self.ip, self.port)) - - return back + return {key: map(helper.unpackAddress, val) for key, val in res["peers"].iteritems()} # Send my hashfield to peer # Return: True if sent @@ -360,50 +261,28 @@ class Peer(object): if self.time_my_hashfield_sent and self.site.content_manager.hashfield.time_changed <= self.time_my_hashfield_sent: return False # Peer already has the latest hashfield - res = self.request("setHashfield", {"site": self.site.address, "hashfield_raw": self.site.content_manager.hashfield.tobytes()}) + res = self.request("setHashfield", {"site": self.site.address, "hashfield_raw": self.site.content_manager.hashfield.tostring()}) if not res or "error" in res: return False else: self.time_my_hashfield_sent = time.time() return True - def publish(self, address, inner_path, body, modified, diffs=[]): - if len(body) > 10 * 1024 and self.connection and self.connection.handshake.get("rev", 0) >= 4095: - # To save bw 
we don't push big content.json to peers - body = b"" - - return self.request("update", { - "site": address, - "inner_path": inner_path, - "body": body, - "modified": modified, - "diffs": diffs - }) - # Stop and remove from site - def remove(self, reason="Removing"): + def remove(self): self.log("Removing peer...Connection error: %s, Hash failed: %s" % (self.connection_error, self.hash_failed)) if self.site and self.key in self.site.peers: del(self.site.peers[self.key]) - - if self.site and self in self.site.peers_recent: - self.site.peers_recent.remove(self) - if self.connection: - self.connection.close(reason) + self.connection.close() # - EVENTS - # On connection error - def onConnectionError(self, reason="Unknown"): + def onConnectionError(self): self.connection_error += 1 - if self.site and len(self.site.peers) > 200: - limit = 3 - else: - limit = 6 - self.reputation -= 1 - if self.connection_error >= limit: # Dead peer - self.remove("Peer connection: %s" % reason) + if self.connection_error >= 3: # Dead peer + self.remove() # Done working with peer def onWorkerDone(self): diff --git a/src/Peer/PeerHashfield.py b/src/Peer/PeerHashfield.py index fdd414c8..7e4f9184 100644 --- a/src/Peer/PeerHashfield.py +++ b/src/Peer/PeerHashfield.py @@ -3,7 +3,7 @@ import time class PeerHashfield(object): - __slots__ = ("storage", "time_changed", "append", "remove", "tobytes", "frombytes", "__len__", "__iter__") + __slots__ = ("storage", "time_changed", "append", "remove", "tostring", "fromstring", "__len__", "__iter__") def __init__(self): self.storage = self.createStorage() self.time_changed = time.time() @@ -12,8 +12,8 @@ class PeerHashfield(object): storage = array.array("H") self.append = storage.append self.remove = storage.remove - self.tobytes = storage.tobytes - self.frombytes = storage.frombytes + self.tostring = storage.tostring + self.fromstring = storage.fromstring self.__len__ = storage.__len__ self.__iter__ = storage.__iter__ return storage @@ -44,23 +44,15 @@ 
class PeerHashfield(object): else: return False - def removeHashId(self, hash_id): - if hash_id in self.storage: - self.storage.remove(hash_id) - self.time_changed = time.time() - return True - else: - return False - def getHashId(self, hash): return int(hash[0:4], 16) def hasHash(self, hash): return int(hash[0:4], 16) in self.storage - def replaceFromBytes(self, hashfield_raw): + def replaceFromString(self, hashfield_raw): self.storage = self.createStorage() - self.storage.frombytes(hashfield_raw) + self.storage.fromstring(hashfield_raw) self.time_changed = time.time() if __name__ == "__main__": @@ -68,8 +60,8 @@ if __name__ == "__main__": s = time.time() for i in range(10000): field.appendHashId(i) - print(time.time()-s) + print time.time()-s s = time.time() for i in range(10000): field.hasHash("AABB") - print(time.time()-s) \ No newline at end of file + print time.time()-s \ No newline at end of file diff --git a/src/Peer/PeerPortchecker.py b/src/Peer/PeerPortchecker.py deleted file mode 100644 index 3c4daecf..00000000 --- a/src/Peer/PeerPortchecker.py +++ /dev/null @@ -1,189 +0,0 @@ -import logging -import urllib.request -import urllib.parse -import re -import time - -from Debug import Debug -from util import UpnpPunch - - -class PeerPortchecker(object): - checker_functions = { - "ipv4": ["checkIpfingerprints", "checkCanyouseeme"], - "ipv6": ["checkMyaddr", "checkIpv6scanner"] - } - def __init__(self, file_server): - self.log = logging.getLogger("PeerPortchecker") - self.upnp_port_opened = False - self.file_server = file_server - - def requestUrl(self, url, post_data=None): - if type(post_data) is dict: - post_data = urllib.parse.urlencode(post_data).encode("utf8") - req = urllib.request.Request(url, post_data) - req.add_header("Referer", url) - req.add_header("User-Agent", "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11") - req.add_header("Accept", 
"text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8") - return urllib.request.urlopen(req, timeout=20.0) - - def portOpen(self, port): - self.log.info("Trying to open port using UpnpPunch...") - - try: - UpnpPunch.ask_to_open_port(port, 'ZeroNet', retries=3, protos=["TCP"]) - self.upnp_port_opened = True - except Exception as err: - self.log.warning("UpnpPunch run error: %s" % Debug.formatException(err)) - return False - - return True - - def portClose(self, port): - return UpnpPunch.ask_to_close_port(port, protos=["TCP"]) - - def portCheck(self, port, ip_type="ipv4"): - checker_functions = self.checker_functions[ip_type] - - for func_name in checker_functions: - func = getattr(self, func_name) - s = time.time() - try: - res = func(port) - if res: - self.log.info( - "Checked port %s (%s) using %s result: %s in %.3fs" % - (port, ip_type, func_name, res, time.time() - s) - ) - time.sleep(0.1) - if res["opened"] and not self.file_server.had_external_incoming: - res["opened"] = False - self.log.warning("Port %s:%s looks opened, but no incoming connection" % (res["ip"], port)) - break - except Exception as err: - self.log.warning( - "%s check error: %s in %.3fs" % - (func_name, Debug.formatException(err), time.time() - s) - ) - res = {"ip": None, "opened": False} - - return res - - def checkCanyouseeme(self, port): - data = urllib.request.urlopen("https://www.canyouseeme.org/", b"ip=1.1.1.1&port=%s" % str(port).encode("ascii"), timeout=20.0).read().decode("utf8") - - message = re.match(r'.*

    (.*?)

    ', data, re.DOTALL).group(1) - message = re.sub(r"<.*?>", "", message.replace("
    ", " ").replace(" ", " ")) # Strip http tags - - match = re.match(r".*service on (.*?) on", message) - if match: - ip = match.group(1) - else: - raise Exception("Invalid response: %s" % message) - - if "Success" in message: - return {"ip": ip, "opened": True} - elif "Error" in message: - return {"ip": ip, "opened": False} - else: - raise Exception("Invalid response: %s" % message) - - def checkIpfingerprints(self, port): - data = self.requestUrl("https://www.ipfingerprints.com/portscan.php").read().decode("utf8") - ip = re.match(r'.*name="remoteHost".*?value="(.*?)"', data, re.DOTALL).group(1) - - post_data = { - "remoteHost": ip, "start_port": port, "end_port": port, - "normalScan": "Yes", "scan_type": "connect2", "ping_type": "none" - } - message = self.requestUrl("https://www.ipfingerprints.com/scripts/getPortsInfo.php", post_data).read().decode("utf8") - - if "open" in message: - return {"ip": ip, "opened": True} - elif "filtered" in message or "closed" in message: - return {"ip": ip, "opened": False} - else: - raise Exception("Invalid response: %s" % message) - - def checkMyaddr(self, port): - url = "http://ipv6.my-addr.com/online-ipv6-port-scan.php" - - data = self.requestUrl(url).read().decode("utf8") - - ip = re.match(r'.*Your IP address is:[ ]*([0-9\.:a-z]+)', data.replace(" ", ""), re.DOTALL).group(1) - - post_data = {"addr": ip, "ports_selected": "", "ports_list": port} - data = self.requestUrl(url, post_data).read().decode("utf8") - - message = re.match(r".*(.*?)
    ", data, re.DOTALL).group(1) - - if "ok.png" in message: - return {"ip": ip, "opened": True} - elif "fail.png" in message: - return {"ip": ip, "opened": False} - else: - raise Exception("Invalid response: %s" % message) - - def checkIpv6scanner(self, port): - url = "http://www.ipv6scanner.com/cgi-bin/main.py" - - data = self.requestUrl(url).read().decode("utf8") - - ip = re.match(r'.*Your IP address is[ ]*([0-9\.:a-z]+)', data.replace(" ", ""), re.DOTALL).group(1) - - post_data = {"host": ip, "scanType": "1", "port": port, "protocol": "tcp", "authorized": "yes"} - data = self.requestUrl(url, post_data).read().decode("utf8") - - message = re.match(r".*(.*?)
    ", data, re.DOTALL).group(1) - message_text = re.sub("<.*?>", " ", message.replace("
    ", " ").replace(" ", " ").strip()) # Strip http tags - - if "OPEN" in message_text: - return {"ip": ip, "opened": True} - elif "CLOSED" in message_text or "FILTERED" in message_text: - return {"ip": ip, "opened": False} - else: - raise Exception("Invalid response: %s" % message_text) - - def checkPortchecker(self, port): # Not working: Forbidden - data = self.requestUrl("https://portchecker.co").read().decode("utf8") - csrf = re.match(r'.*name="_csrf" value="(.*?)"', data, re.DOTALL).group(1) - - data = self.requestUrl("https://portchecker.co", {"port": port, "_csrf": csrf}).read().decode("utf8") - message = re.match(r'.*
    (.*?)
    ', data, re.DOTALL).group(1) - message = re.sub(r"<.*?>", "", message.replace("
    ", " ").replace(" ", " ").strip()) # Strip http tags - - match = re.match(r".*targetIP.*?value=\"(.*?)\"", data, re.DOTALL) - if match: - ip = match.group(1) - else: - raise Exception("Invalid response: %s" % message) - - if "open" in message: - return {"ip": ip, "opened": True} - elif "closed" in message: - return {"ip": ip, "opened": False} - else: - raise Exception("Invalid response: %s" % message) - - def checkSubnetonline(self, port): # Not working: Invalid response - url = "https://www.subnetonline.com/pages/ipv6-network-tools/online-ipv6-port-scanner.php" - - data = self.requestUrl(url).read().decode("utf8") - - ip = re.match(r'.*Your IP is.*?name="host".*?value="(.*?)"', data, re.DOTALL).group(1) - token = re.match(r'.*name="token".*?value="(.*?)"', data, re.DOTALL).group(1) - - post_data = {"host": ip, "port": port, "allow": "on", "token": token, "submit": "Scanning.."} - data = self.requestUrl(url, post_data).read().decode("utf8") - - print(post_data, data) - - message = re.match(r".*
    (.*?)
    ", data, re.DOTALL).group(1) - message = re.sub(r"<.*?>", "", message.replace("
    ", " ").replace(" ", " ").strip()) # Strip http tags - - if "online" in message: - return {"ip": ip, "opened": True} - elif "closed" in message: - return {"ip": ip, "opened": False} - else: - raise Exception("Invalid response: %s" % message) diff --git a/src/Peer/__init__.py b/src/Peer/__init__.py index e73c58c5..3e92827f 100644 --- a/src/Peer/__init__.py +++ b/src/Peer/__init__.py @@ -1,2 +1,2 @@ -from .Peer import Peer -from .PeerHashfield import PeerHashfield +from Peer import Peer +from PeerHashfield import PeerHashfield diff --git a/src/Plugin/PluginManager.py b/src/Plugin/PluginManager.py index 56540e60..c1069da7 100644 --- a/src/Plugin/PluginManager.py +++ b/src/Plugin/PluginManager.py @@ -1,217 +1,58 @@ import logging import os import sys -import shutil -import time -from collections import defaultdict - -import importlib -import json from Debug import Debug from Config import config -import plugins class PluginManager: + def __init__(self): self.log = logging.getLogger("PluginManager") - self.path_plugins = None - if plugins.__file__: - self.path_plugins = os.path.dirname(os.path.abspath(plugins.__file__)); - self.path_installed_plugins = config.data_dir + "/__plugins__" - self.plugins = defaultdict(list) # Registered plugins (key: class name, value: list of plugins for class) - self.subclass_order = {} # Record the load order of the plugins, to keep it after reload - self.pluggable = {} + self.plugin_path = "plugins" # Plugin directory + self.plugins = {} # Registered plugins (key: class name, value: list of plugins for class) self.plugin_names = [] # Loaded plugin names - self.plugins_updated = {} # List of updated plugins since restart - self.plugins_rev = {} # Installed plugins revision numbers - self.after_load = [] # Execute functions after loaded plugins - self.function_flags = {} # Flag function for permissions - self.reloading = False - self.config_path = config.data_dir + "/plugins.json" - self.loadConfig() - self.config.setdefault("builtin", 
{}) - - if self.path_plugins: - sys.path.append(os.path.join(os.getcwd(), self.path_plugins)) - self.migratePlugins() + sys.path.append(self.plugin_path) if config.debug: # Auto reload Plugins on file change from Debug import DebugReloader - DebugReloader.watcher.addCallback(self.reloadPlugins) - - def loadConfig(self): - if os.path.isfile(self.config_path): - try: - self.config = json.load(open(self.config_path, encoding="utf8")) - except Exception as err: - self.log.error("Error loading %s: %s" % (self.config_path, err)) - self.config = {} - else: - self.config = {} - - def saveConfig(self): - f = open(self.config_path, "w", encoding="utf8") - json.dump(self.config, f, ensure_ascii=False, sort_keys=True, indent=2) - - def migratePlugins(self): - for dir_name in os.listdir(self.path_plugins): - if dir_name == "Mute": - self.log.info("Deleting deprecated/renamed plugin: %s" % dir_name) - shutil.rmtree("%s/%s" % (self.path_plugins, dir_name)) + DebugReloader(self.reloadPlugins) # -- Load / Unload -- - def listPlugins(self, list_disabled=False): - plugins = [] - for dir_name in sorted(os.listdir(self.path_plugins)): - dir_path = os.path.join(self.path_plugins, dir_name) - plugin_name = dir_name.replace("disabled-", "") - if dir_name.startswith("disabled"): - is_enabled = False - else: - is_enabled = True - - plugin_config = self.config["builtin"].get(plugin_name, {}) - if "enabled" in plugin_config: - is_enabled = plugin_config["enabled"] - - if dir_name == "__pycache__" or not os.path.isdir(dir_path): - continue # skip - if dir_name.startswith("Debug") and not config.debug: - continue # Only load in debug mode if module name starts with Debug - if not is_enabled and not list_disabled: - continue # Dont load if disabled - - plugin = {} - plugin["source"] = "builtin" - plugin["name"] = plugin_name - plugin["dir_name"] = dir_name - plugin["dir_path"] = dir_path - plugin["inner_path"] = plugin_name - plugin["enabled"] = is_enabled - plugin["rev"] = config.rev - 
plugin["loaded"] = plugin_name in self.plugin_names - plugins.append(plugin) - - plugins += self.listInstalledPlugins(list_disabled) - return plugins - - def listInstalledPlugins(self, list_disabled=False): - plugins = [] - - for address, site_plugins in sorted(self.config.items()): - if address == "builtin": - continue - for plugin_inner_path, plugin_config in sorted(site_plugins.items()): - is_enabled = plugin_config.get("enabled", False) - if not is_enabled and not list_disabled: - continue - plugin_name = os.path.basename(plugin_inner_path) - - dir_path = "%s/%s/%s" % (self.path_installed_plugins, address, plugin_inner_path) - - plugin = {} - plugin["source"] = address - plugin["name"] = plugin_name - plugin["dir_name"] = plugin_name - plugin["dir_path"] = dir_path - plugin["inner_path"] = plugin_inner_path - plugin["enabled"] = is_enabled - plugin["rev"] = plugin_config.get("rev", 0) - plugin["loaded"] = plugin_name in self.plugin_names - plugins.append(plugin) - - return plugins - # Load all plugin def loadPlugins(self): - all_loaded = True - s = time.time() - if self.path_plugins is None: - return - for plugin in self.listPlugins(): - self.log.debug("Loading plugin: %s (%s)" % (plugin["name"], plugin["source"])) - if plugin["source"] != "builtin": - self.plugins_rev[plugin["name"]] = plugin["rev"] - site_plugin_dir = os.path.dirname(plugin["dir_path"]) - if site_plugin_dir not in sys.path: - sys.path.append(site_plugin_dir) + for dir_name in os.listdir(self.plugin_path): + dir_path = os.path.join(self.plugin_path, dir_name) + if dir_name.startswith("disabled"): + continue # Dont load if disabled + if not os.path.isdir(dir_path): + continue # Dont load if not dir + if dir_name.startswith("Debug") and not config.debug: + continue # Only load in debug mode if module name starts with Debug + self.log.debug("Loading plugin: %s" % dir_name) try: - sys.modules[plugin["name"]] = __import__(plugin["dir_name"]) - except Exception as err: - self.log.error("Plugin %s 
load error: %s" % (plugin["name"], Debug.formatException(err))) - all_loaded = False - if plugin["name"] not in self.plugin_names: - self.plugin_names.append(plugin["name"]) - - self.log.debug("Plugins loaded in %.3fs" % (time.time() - s)) - for func in self.after_load: - func() - return all_loaded + __import__(dir_name) + except Exception, err: + self.log.error("Plugin %s load error: %s" % (dir_name, Debug.formatException(err))) + if dir_name not in self.plugin_names: + self.plugin_names.append(dir_name) # Reload all plugins def reloadPlugins(self): - self.reloading = True - self.after_load = [] - self.plugins_before = self.plugins - self.plugins = defaultdict(list) # Reset registered plugins - for module_name, module in list(sys.modules.items()): - if not module or not getattr(module, "__file__", None): - continue - if self.path_plugins not in module.__file__ and self.path_installed_plugins not in module.__file__: - continue - - if "allow_reload" in dir(module) and not module.allow_reload: # Reload disabled - # Re-add non-reloadable plugins - for class_name, classes in self.plugins_before.items(): - for c in classes: - if c.__module__ != module.__name__: - continue - self.plugins[class_name].append(c) - else: - try: - importlib.reload(module) - except Exception as err: - self.log.error("Plugin %s reload error: %s" % (module_name, Debug.formatException(err))) + self.plugins = {} # Reset registered plugins + for module_name, module in sys.modules.items(): + if module and "__file__" in dir(module) and self.plugin_path in module.__file__: # Module file within plugin_path + if "allow_reload" not in dir(module) or module.allow_reload: # Check if reload disabled + try: + reload(module) + except Exception, err: + self.log.error("Plugin %s reload error: %s" % (module_name, Debug.formatException(err))) self.loadPlugins() # Load new plugins - # Change current classes in memory - import gc - patched = {} - for class_name, classes in self.plugins.items(): - classes = 
classes[:] # Copy the current plugins - classes.reverse() - base_class = self.pluggable[class_name] # Original class - classes.append(base_class) # Add the class itself to end of inherience line - plugined_class = type(class_name, tuple(classes), dict()) # Create the plugined class - for obj in gc.get_objects(): - if type(obj).__name__ == class_name: - obj.__class__ = plugined_class - patched[class_name] = patched.get(class_name, 0) + 1 - self.log.debug("Patched objects: %s" % patched) - - # Change classes in modules - patched = {} - for class_name, classes in self.plugins.items(): - for module_name, module in list(sys.modules.items()): - if class_name in dir(module): - if "__class__" not in dir(getattr(module, class_name)): # Not a class - continue - base_class = self.pluggable[class_name] - classes = self.plugins[class_name][:] - classes.reverse() - classes.append(base_class) - plugined_class = type(class_name, tuple(classes), dict()) - setattr(module, class_name, plugined_class) - patched[class_name] = patched.get(class_name, 0) + 1 - - self.log.debug("Patched modules: %s" % patched) - self.reloading = False - plugin_manager = PluginManager() # Singletone @@ -222,21 +63,8 @@ plugin_manager = PluginManager() # Singletone def acceptPlugins(base_class): class_name = base_class.__name__ - plugin_manager.pluggable[class_name] = base_class if class_name in plugin_manager.plugins: # Has plugins classes = plugin_manager.plugins[class_name][:] # Copy the current plugins - - # Restore the subclass order after reload - if class_name in plugin_manager.subclass_order: - classes = sorted( - classes, - key=lambda key: - plugin_manager.subclass_order[class_name].index(str(key)) - if str(key) in plugin_manager.subclass_order[class_name] - else 9999 - ) - plugin_manager.subclass_order[class_name] = list(map(str, classes)) - classes.reverse() classes.append(base_class) # Add the class itself to end of inherience line plugined_class = type(class_name, tuple(classes), dict()) # 
Create the plugined class @@ -248,13 +76,6 @@ def acceptPlugins(base_class): # Register plugin to class name decorator def registerTo(class_name): - if config.debug and not plugin_manager.reloading: - import gc - for obj in gc.get_objects(): - if type(obj).__name__ == class_name: - raise Exception("Class %s instances already present in memory" % class_name) - break - plugin_manager.log.debug("New plugin registered to: %s" % class_name) if class_name not in plugin_manager.plugins: plugin_manager.plugins[class_name] = [] @@ -265,11 +86,6 @@ def registerTo(class_name): return classDecorator -def afterLoad(func): - plugin_manager.after_load.append(func) - return func - - # - Example usage - if __name__ == "__main__": @@ -289,4 +105,4 @@ if __name__ == "__main__": else: return "Can't route to", path - print(Request().route("MainPage")) + print Request().route("MainPage") diff --git a/src/Site/Site.py b/src/Site/Site.py index d6179307..4bf5ff3c 100644 --- a/src/Site/Site.py +++ b/src/Site/Site.py @@ -1,81 +1,71 @@ import os import json import logging +import hashlib import re import time import random import sys -import hashlib -import collections -import base64 +import binascii +import struct +import socket +import urllib +import urllib2 import gevent -import gevent.pool import util +from lib import bencode +from lib.subtl.subtl import UdpTrackerClient from Config import config from Peer import Peer from Worker import WorkerManager from Debug import Debug from Content import ContentManager -from .SiteStorage import SiteStorage +from SiteStorage import SiteStorage from Crypt import CryptHash from util import helper -from util import Diff -from util import GreenletManager -from Plugin import PluginManager -from File import FileServer -from .SiteAnnouncer import SiteAnnouncer -from . 
import SiteManager +import SiteManager -@PluginManager.acceptPlugins -class Site(object): +class Site: - def __init__(self, address, allow_create=True, settings=None): - self.address = str(re.sub("[^A-Za-z0-9]", "", address)) # Make sure its correct address - self.address_hash = hashlib.sha256(self.address.encode("ascii")).digest() - self.address_sha1 = hashlib.sha1(self.address.encode("ascii")).digest() + def __init__(self, address, allow_create=True): + self.address = re.sub("[^A-Za-z0-9]", "", address) # Make sure its correct address self.address_short = "%s..%s" % (self.address[:6], self.address[-4:]) # Short address for logging self.log = logging.getLogger("Site:%s" % self.address_short) self.addEventListeners() self.content = None # Load content.json self.peers = {} # Key: ip:port, Value: Peer.Peer - self.peers_recent = collections.deque(maxlen=150) self.peer_blacklist = SiteManager.peer_blacklist # Ignore this peers (eg. myself) - self.greenlet_manager = GreenletManager.GreenletManager() # Running greenlets + self.time_announce = 0 # Last announce time to tracker + self.last_tracker_id = random.randint(0, 10) # Last announced tracker id self.worker_manager = WorkerManager(self) # Handle site download from other peers self.bad_files = {} # SHA check failed files, need to redownload {"inner.content": 1} (key: file, value: failed accept) self.content_updated = None # Content.js update time self.notifications = [] # Pending notifications displayed once on page load [error|ok|info, message, timeout] self.page_requested = False # Page viewed in browser - self.websockets = [] # Active site websocket connections - self.connection_server = None - self.loadSettings(settings) # Load settings from sites.json self.storage = SiteStorage(self, allow_create=allow_create) # Save and load site files - self.content_manager = ContentManager(self) - self.content_manager.loadContents() # Load content.json files - if "main" in sys.modules: # import main has side-effects, breaks 
tests - import main - if "file_server" in dir(main): # Use global file server by default if possible - self.connection_server = main.file_server - else: - main.file_server = FileServer() - self.connection_server = main.file_server + self.loadSettings() # Load settings from sites.json + self.content_manager = ContentManager(self) # Load contents + self.connection_server = None + if "main" in sys.modules and "file_server" in dir(sys.modules["main"]): # Use global file server by default if possible + self.connection_server = sys.modules["main"].file_server else: - self.connection_server = FileServer() - - self.announcer = SiteAnnouncer(self) # Announce and get peer list from other nodes + self.connection_server = None + if not self.settings.get("auth_key"): # To auth user in site (Obsolete, will be removed) + self.settings["auth_key"] = CryptHash.random() + self.log.debug("New auth key: %s" % self.settings["auth_key"]) + self.saveSettings() if not self.settings.get("wrapper_key"): # To auth websocket permissions self.settings["wrapper_key"] = CryptHash.random() self.log.debug("New wrapper key: %s" % self.settings["wrapper_key"]) + self.saveSettings() - if not self.settings.get("ajax_key"): # To auth websocket permissions - self.settings["ajax_key"] = CryptHash.random() - self.log.debug("New ajax key: %s" % self.settings["ajax_key"]) + self.websockets = [] # Active site websocket connections def __str__(self): return "Site %s" % self.address_short @@ -84,203 +74,91 @@ class Site(object): return "<%s>" % self.__str__() # Load site settings from data/sites.json - def loadSettings(self, settings=None): - if not settings: - settings = json.load(open("%s/sites.json" % config.data_dir)).get(self.address) - if settings: - self.settings = settings - if "cache" not in settings: - settings["cache"] = {} - if "size_files_optional" not in settings: - settings["size_optional"] = 0 - if "optional_downloaded" not in settings: - settings["optional_downloaded"] = 0 - if "downloaded" 
not in settings: - settings["downloaded"] = settings.get("added") - self.bad_files = settings["cache"].get("bad_files", {}) - settings["cache"]["bad_files"] = {} - # Give it minimum 10 tries after restart - for inner_path in self.bad_files: - self.bad_files[inner_path] = min(self.bad_files[inner_path], 20) + def loadSettings(self): + sites_settings = json.load(open("%s/sites.json" % config.data_dir)) + if self.address in sites_settings: + self.settings = sites_settings[self.address] else: - self.settings = { - "own": False, "serving": True, "permissions": [], "cache": {"bad_files": {}}, "size_files_optional": 0, - "added": int(time.time()), "downloaded": None, "optional_downloaded": 0, "size_optional": 0 - } # Default - if config.download_optional == "auto": - self.settings["autodownloadoptional"] = True - - # Add admin permissions to homepage - if self.address in (config.homepage, config.updatesite) and "ADMIN" not in self.settings["permissions"]: - self.settings["permissions"].append("ADMIN") - + if self.address == config.homepage: # Add admin permissions to homepage + permissions = ["ADMIN"] + else: + permissions = [] + self.settings = {"own": False, "serving": True, "permissions": permissions} # Default return # Save site settings to data/sites.json def saveSettings(self): - if not SiteManager.site_manager.sites: - SiteManager.site_manager.sites = {} - if not SiteManager.site_manager.sites.get(self.address): - SiteManager.site_manager.sites[self.address] = self - SiteManager.site_manager.load(False) - SiteManager.site_manager.saveDelayed() - - def isServing(self): - if config.offline: - return False - else: - return self.settings["serving"] - - def getSettingsCache(self): - back = {} - back["bad_files"] = self.bad_files - back["hashfield"] = base64.b64encode(self.content_manager.hashfield.tobytes()).decode("ascii") - return back + sites_settings = json.load(open("%s/sites.json" % config.data_dir)) + sites_settings[self.address] = self.settings + 
helper.atomicWrite("%s/sites.json" % config.data_dir, json.dumps(sites_settings, indent=2, sort_keys=True)) # Max site size in MB def getSizeLimit(self): - return self.settings.get("size_limit", int(config.size_limit)) + return self.settings.get("size_limit", config.size_limit) # Next size limit based on current size def getNextSizeLimit(self): - size_limits = [25, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000, 50000, 100000] + size_limits = [10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000, 50000, 100000] size = self.settings.get("size", 0) for size_limit in size_limits: if size * 1.2 < size_limit * 1024 * 1024: return size_limit return 999999 - def isAddedRecently(self): - return time.time() - self.settings.get("added", 0) < 60 * 60 * 24 - # Download all file from content.json - def downloadContent(self, inner_path, download_files=True, peer=None, check_modifications=False, diffs={}): + def downloadContent(self, inner_path, download_files=True, peer=None, check_modifications=False): s = time.time() - if config.verbose: - self.log.debug( - "DownloadContent %s: Started. (download_files: %s, check_modifications: %s, diffs: %s)..." % - (inner_path, download_files, check_modifications, diffs.keys()) - ) - - if not inner_path.endswith("content.json"): - return False - + self.log.debug("Downloading %s..." 
% inner_path) found = self.needFile(inner_path, update=self.bad_files.get(inner_path)) content_inner_dir = helper.getDirname(inner_path) if not found: - self.log.debug("DownloadContent %s: Download failed, check_modifications: %s" % (inner_path, check_modifications)) + self.log.debug("Download %s failed, check_modifications: %s" % (inner_path, check_modifications)) if check_modifications: # Download failed, but check modifications if its succed later self.onFileDone.once(lambda file_name: self.checkModifications(0), "check_modifications") return False # Could not download content.json - if config.verbose: - self.log.debug("DownloadContent got %s" % inner_path) - sub_s = time.time() - + self.log.debug("Got %s" % inner_path) changed, deleted = self.content_manager.loadContent(inner_path, load_includes=False) - if config.verbose: - self.log.debug("DownloadContent %s: loadContent done in %.3fs" % (inner_path, time.time() - sub_s)) - - if inner_path == "content.json": - self.saveSettings() - - if peer: # Update last received update from peer to prevent re-sending the same update to it - peer.last_content_json_update = self.content_manager.contents[inner_path]["modified"] - - # Verify size limit - if inner_path == "content.json": - site_size_limit = self.getSizeLimit() * 1024 * 1024 - content_size = len(json.dumps(self.content_manager.contents[inner_path], indent=1)) + sum([file["size"] for file in list(self.content_manager.contents[inner_path].get("files", {}).values()) if file["size"] >= 0]) # Size of new content - if site_size_limit < content_size: - # Not enought don't download anything - self.log.debug("DownloadContent Size limit reached (site too big please increase limit): %.2f MB > %.2f MB" % (content_size / 1024 / 1024, site_size_limit / 1024 / 1024)) - return False - # Start download files file_threads = [] if download_files: - for file_relative_path in list(self.content_manager.contents[inner_path].get("files", {}).keys()): + for file_relative_path in 
self.content_manager.contents[inner_path].get("files", {}).keys(): file_inner_path = content_inner_dir + file_relative_path - - # Try to diff first - diff_success = False - diff_actions = diffs.get(file_relative_path) - if diff_actions and self.bad_files.get(file_inner_path): - try: - s = time.time() - new_file = Diff.patch(self.storage.open(file_inner_path, "rb"), diff_actions) - new_file.seek(0) - time_diff = time.time() - s - - s = time.time() - diff_success = self.content_manager.verifyFile(file_inner_path, new_file) - time_verify = time.time() - s - - if diff_success: - s = time.time() - new_file.seek(0) - self.storage.write(file_inner_path, new_file) - time_write = time.time() - s - - s = time.time() - self.onFileDone(file_inner_path) - time_on_done = time.time() - s - - self.log.debug( - "DownloadContent Patched successfully: %s (diff: %.3fs, verify: %.3fs, write: %.3fs, on_done: %.3fs)" % - (file_inner_path, time_diff, time_verify, time_write, time_on_done) - ) - except Exception as err: - self.log.debug("DownloadContent Failed to patch %s: %s" % (file_inner_path, err)) - diff_success = False - - if not diff_success: - # Start download and dont wait for finish, return the event - res = self.needFile(file_inner_path, blocking=False, update=self.bad_files.get(file_inner_path), peer=peer) - if res is not True and res is not False: # Need downloading and file is allowed - file_threads.append(res) # Append evt + # Start download and dont wait for finish, return the event + res = self.needFile(file_inner_path, blocking=False, update=self.bad_files.get(file_inner_path), peer=peer) + if res is not True and res is not False: # Need downloading and file is allowed + file_threads.append(res) # Append evt # Optionals files if inner_path == "content.json": gevent.spawn(self.updateHashfield) - for file_relative_path in list(self.content_manager.contents[inner_path].get("files_optional", {}).keys()): - file_inner_path = content_inner_dir + file_relative_path - if 
file_inner_path not in changed and not self.bad_files.get(file_inner_path): - continue - if not self.isDownloadable(file_inner_path): - continue - # Start download and dont wait for finish, return the event - res = self.pooledNeedFile( - file_inner_path, blocking=False, update=self.bad_files.get(file_inner_path), peer=peer - ) - if res is not True and res is not False: # Need downloading and file is allowed - file_threads.append(res) # Append evt + if self.settings.get("autodownloadoptional"): + for file_relative_path in self.content_manager.contents[inner_path].get("files_optional", {}).keys(): + file_inner_path = content_inner_dir + file_relative_path + # Start download and dont wait for finish, return the event + res = self.needFile(file_inner_path, blocking=False, update=self.bad_files.get(file_inner_path), peer=peer) + if res is not True and res is not False: # Need downloading and file is allowed + file_threads.append(res) # Append evt # Wait for includes download include_threads = [] - for file_relative_path in list(self.content_manager.contents[inner_path].get("includes", {}).keys()): + for file_relative_path in self.content_manager.contents[inner_path].get("includes", {}).keys(): file_inner_path = content_inner_dir + file_relative_path include_thread = gevent.spawn(self.downloadContent, file_inner_path, download_files=download_files, peer=peer) include_threads.append(include_thread) - if config.verbose: - self.log.debug("DownloadContent %s: Downloading %s includes..." % (inner_path, len(include_threads))) + self.log.debug("%s: Downloading %s includes..." 
% (inner_path, len(include_threads))) gevent.joinall(include_threads) - if config.verbose: - self.log.debug("DownloadContent %s: Includes download ended" % inner_path) + self.log.debug("%s: Includes download ended" % inner_path) if check_modifications: # Check if every file is up-to-date self.checkModifications(0) - if config.verbose: - self.log.debug("DownloadContent %s: Downloading %s files, changed: %s..." % (inner_path, len(file_threads), len(changed))) + self.log.debug("%s: Downloading %s files, changed: %s..." % (inner_path, len(file_threads), len(changed))) gevent.joinall(file_threads) - if config.verbose: - self.log.debug("DownloadContent %s: ended in %.3fs (tasks left: %s)" % ( - inner_path, time.time() - s, len(self.worker_manager.tasks) - )) + self.log.debug("%s: DownloadContent ended in %.2fs" % (inner_path, time.time() - s)) return True @@ -288,59 +166,22 @@ class Site(object): def getReachableBadFiles(self): if not self.bad_files: return False - return [bad_file for bad_file, retry in self.bad_files.items() if retry < 3] + return [bad_file for bad_file, retry in self.bad_files.iteritems() if retry < 3] # Retry download bad files def retryBadFiles(self, force=False): - self.checkBadFiles() - - self.log.debug("Retry %s bad files" % len(self.bad_files)) - content_inner_paths = [] - file_inner_paths = [] - - for bad_file, tries in list(self.bad_files.items()): - if force or random.randint(0, min(40, tries)) < 4: # Larger number tries = less likely to check every 15min - if bad_file.endswith("content.json"): - content_inner_paths.append(bad_file) - else: - file_inner_paths.append(bad_file) - - if content_inner_paths: - self.pooledDownloadContent(content_inner_paths, only_if_bad=True) - - if file_inner_paths: - self.pooledDownloadFile(file_inner_paths, only_if_bad=True) - - def checkBadFiles(self): - for bad_file in list(self.bad_files.keys()): - file_info = self.content_manager.getFileInfo(bad_file) - if bad_file.endswith("content.json"): - if file_info is 
False and bad_file != "content.json": - del self.bad_files[bad_file] - self.log.debug("No info for file: %s, removing from bad_files" % bad_file) - else: - if file_info is False or not file_info.get("size"): - del self.bad_files[bad_file] - self.log.debug("No info or size for file: %s, removing from bad_files" % bad_file) + for bad_file, tries in self.bad_files.iteritems(): + if force or random.randint(0, min(20, tries)) == 0: # Larger number tries = less likely to check every 15min + self.needFile(bad_file, update=True, blocking=False) # Download all files of the site @util.Noparallel(blocking=False) - def download(self, check_size=False, blind_includes=False, retry_bad_files=True): - if not self.connection_server: - self.log.debug("No connection server found, skipping download") - return False - - s = time.time() + def download(self, check_size=False, blind_includes=False): self.log.debug( - "Start downloading, bad_files: %s, check_size: %s, blind_includes: %s, isAddedRecently: %s" % - (self.bad_files, check_size, blind_includes, self.isAddedRecently()) + "Start downloading, bad_files: %s, check_size: %s, blind_includes: %s" % + (self.bad_files, check_size, blind_includes) ) - - if self.isAddedRecently(): - gevent.spawn(self.announce, mode="start", force=True) - else: - gevent.spawn(self.announce, mode="update") - + gevent.spawn(self.announce) if check_size: # Check the size first valid = self.downloadContent("content.json", download_files=False) # Just download content.json files if not valid: @@ -349,269 +190,172 @@ class Site(object): # Download everything valid = self.downloadContent("content.json", check_modifications=blind_includes) - if retry_bad_files: - self.onComplete.once(lambda: self.retryBadFiles(force=True)) - self.log.debug("Download done in %.3fs" % (time.time() - s)) + self.retryBadFiles(force=True) return valid - def pooledDownloadContent(self, inner_paths, pool_size=100, only_if_bad=False): - self.log.debug("New downloadContent pool: len: %s, 
only if bad: %s" % (len(inner_paths), only_if_bad)) - self.worker_manager.started_task_num += len(inner_paths) - pool = gevent.pool.Pool(pool_size) - num_skipped = 0 - site_size_limit = self.getSizeLimit() * 1024 * 1024 - for inner_path in inner_paths: - if not only_if_bad or inner_path in self.bad_files: - pool.spawn(self.downloadContent, inner_path) - else: - num_skipped += 1 - self.worker_manager.started_task_num -= 1 - if self.settings["size"] > site_size_limit * 0.95: - self.log.warning("Site size limit almost reached, aborting downloadContent pool") - for aborted_inner_path in inner_paths: - if aborted_inner_path in self.bad_files: - del self.bad_files[aborted_inner_path] - self.worker_manager.removeSolvedFileTasks(mark_as_good=False) - break - pool.join() - self.log.debug("Ended downloadContent pool len: %s, skipped: %s" % (len(inner_paths), num_skipped)) - - def pooledDownloadFile(self, inner_paths, pool_size=100, only_if_bad=False): - self.log.debug("New downloadFile pool: len: %s, only if bad: %s" % (len(inner_paths), only_if_bad)) - self.worker_manager.started_task_num += len(inner_paths) - pool = gevent.pool.Pool(pool_size) - num_skipped = 0 - for inner_path in inner_paths: - if not only_if_bad or inner_path in self.bad_files: - pool.spawn(self.needFile, inner_path, update=True) - else: - num_skipped += 1 - self.worker_manager.started_task_num -= 1 - self.log.debug("Ended downloadFile pool len: %s, skipped: %s" % (len(inner_paths), num_skipped)) - # Update worker, try to find client that supports listModifications command def updater(self, peers_try, queried, since): - threads = [] while 1: if not peers_try or len(queried) >= 3: # Stop after 3 successful query break peer = peers_try.pop(0) - if config.verbose: - self.log.debug("CheckModifications: Try to get updates from: %s Left: %s" % (peer, peers_try)) - - res = None - with gevent.Timeout(20, exception=False): - res = peer.listModified(since) - + if not peer.connection and len(queried) < 2: + 
peer.connect() # Only open new connection if less than 2 queried already + if not peer.connection or peer.connection.handshake.get("rev", 0) < 126: + continue # Not compatible + res = peer.listModified(since) if not res or "modified_files" not in res: continue # Failed query queried.append(peer) - modified_contents = [] - my_modified = self.content_manager.listModified(since) - num_old_files = 0 - for inner_path, modified in res["modified_files"].items(): # Check if the peer has newer files than we - has_newer = int(modified) > my_modified.get(inner_path, 0) - has_older = int(modified) < my_modified.get(inner_path, 0) - if inner_path not in self.bad_files and not self.content_manager.isArchived(inner_path, modified): - if has_newer: - # We dont have this file or we have older - modified_contents.append(inner_path) - self.bad_files[inner_path] = self.bad_files.get(inner_path, 0) + 1 - if has_older and num_old_files < 5: - num_old_files += 1 - self.log.debug("CheckModifications: %s client has older version of %s, publishing there (%s/5)..." 
% (peer, inner_path, num_old_files)) - gevent.spawn(self.publisher, inner_path, [peer], [], 1) - if modified_contents: - self.log.debug("CheckModifications: %s new modified file from %s" % (len(modified_contents), peer)) - modified_contents.sort(key=lambda inner_path: 0 - res["modified_files"][inner_path]) # Download newest first - t = gevent.spawn(self.pooledDownloadContent, modified_contents, only_if_bad=True) - threads.append(t) - if config.verbose: - self.log.debug("CheckModifications: Waiting for %s pooledDownloadContent" % len(threads)) - gevent.joinall(threads) + for inner_path, modified in res["modified_files"].iteritems(): # Check if the peer has newer files than we + content = self.content_manager.contents.get(inner_path) + if (not content or modified > content["modified"]) and inner_path not in self.bad_files: + self.log.debug("New modified file from %s: %s" % (peer, inner_path)) + # We dont have this file or we have older + self.bad_files[inner_path] = self.bad_files.get(inner_path, 0) + 1 # Mark as bad file + gevent.spawn(self.downloadContent, inner_path) # Download the content.json + the changed files # Check modified content.json files from peers and add modified files to bad_files # Return: Successfully queried peers [Peer, Peer...] 
def checkModifications(self, since=None): - s = time.time() peers_try = [] # Try these peers queried = [] # Successfully queried from these peers - limit = 5 # Wait for peers if not self.peers: - self.announce(mode="update") + self.announce() for wait in range(10): time.sleep(5 + wait) - self.log.debug("CheckModifications: Waiting for peers...") + self.log.debug("Waiting for peers...") if self.peers: break - peers_try = self.getConnectedPeers() - peers_connected_num = len(peers_try) - if peers_connected_num < limit * 2: # Add more, non-connected peers if necessary - peers_try += self.getRecentPeers(limit * 5) + peers = self.peers.values() + random.shuffle(peers) + for peer in peers: # Try to find connected good peers, but we must have at least 5 peers + if peer.findConnection() and peer.connection.handshake.get("rev", 0) > 125: # Add to the beginning if rev125 + peers_try.insert(0, peer) + elif len(peers_try) < 5: # Backup peers, add to end of the try list + peers_try.append(peer) if since is None: # No since defined, download from last modification time-1day since = self.settings.get("modified", 60 * 60 * 24) - 60 * 60 * 24 - - if config.verbose: - self.log.debug( - "CheckModifications: Try to get listModifications from peers: %s, connected: %s, since: %s" % - (peers_try, peers_connected_num, since) - ) + self.log.debug("Try to get listModifications from peers: %s since: %s" % (peers_try, since)) updaters = [] for i in range(3): updaters.append(gevent.spawn(self.updater, peers_try, queried, since)) gevent.joinall(updaters, timeout=10) # Wait 10 sec to workers done query modifications + if not queried: + gevent.joinall(updaters, timeout=10) # Wait another 10 sec if none of updaters finished - if not queried: # Start another 3 thread if first 3 is stuck - peers_try[0:0] = [peer for peer in self.getConnectedPeers() if peer.connection.connected] # Add connected peers - for _ in range(10): - gevent.joinall(updaters, timeout=10) # Wait another 10 sec if none of updaters 
finished - if queried: - break - - self.log.debug("CheckModifications: Queried listModifications from: %s in %.3fs since %s" % (queried, time.time() - s, since)) time.sleep(0.1) + self.log.debug("Queried listModifications from: %s" % queried) return queried # Update content.json from peers and download changed files # Return: None @util.Noparallel() - def update(self, announce=False, check_files=False, since=None): - self.content_manager.loadContent("content.json", load_includes=False) # Reload content.json + def update(self, announce=False): + self.content_manager.loadContent("content.json") # Reload content.json self.content_updated = None # Reset content updated time - - if check_files: - self.storage.updateBadFiles(quick_check=True) # Quick check and mark bad files based on file size - - if not self.isServing(): - return False - self.updateWebsocket(updating=True) - - # Remove files that no longer in content.json - self.checkBadFiles() - if announce: - self.announce(mode="update", force=True) + self.announce() - # Full update, we can reset bad files - if check_files and since == 0: - self.bad_files = {} + queried = self.checkModifications() - queried = self.checkModifications(since) + if not queried: # Not found any client that supports listModifications + self.log.debug("Fallback to old-style update") + self.redownloadContents() - changed, deleted = self.content_manager.loadContent("content.json", load_includes=False) + self.storage.checkFiles(quick_check=True) # Quick check and mark bad files based on file size + + changed, deleted = self.content_manager.loadContent("content.json") if self.bad_files: self.log.debug("Bad files: %s" % self.bad_files) - gevent.spawn(self.retryBadFiles, force=True) - - if len(queried) == 0: - # Failed to query modifications - self.content_updated = False - else: - self.content_updated = time.time() + self.download() + self.settings["size"] = self.content_manager.getTotalSize() # Update site size self.updateWebsocket(updated=True) 
# Update site by redownload all content.json def redownloadContents(self): # Download all content.json again content_threads = [] - for inner_path in list(self.content_manager.contents.keys()): + for inner_path in self.content_manager.contents.keys(): content_threads.append(self.needFile(inner_path, update=True, blocking=False)) self.log.debug("Waiting %s content.json to finish..." % len(content_threads)) gevent.joinall(content_threads) # Publish worker - def publisher(self, inner_path, peers, published, limit, diffs={}, event_done=None, cb_progress=None): + def publisher(self, inner_path, peers, published, limit, event_done=None): file_size = self.storage.getSize(inner_path) - content_json_modified = self.content_manager.contents[inner_path]["modified"] body = self.storage.read(inner_path) - while 1: if not peers or len(published) >= limit: if event_done: event_done.set(True) break # All peers done, or published engouht - peer = peers.pop() - if peer in published: - continue - if peer.last_content_json_update == content_json_modified: - self.log.debug("%s already received this update for %s, skipping" % (peer, inner_path)) - continue - + peer = peers.pop(0) if peer.connection and peer.connection.last_ping_delay: # Peer connected # Timeout: 5sec + size in kb + last_ping - timeout = 5 + int(file_size / 1024) + peer.connection.last_ping_delay + timeout = timeout = 5 + int(file_size / 1024) + peer.connection.last_ping_delay else: # Peer not connected - # Timeout: 10sec + size in kb - timeout = 10 + int(file_size / 1024) + # Timeout: 5sec + size in kb + timeout = timeout = 5 + int(file_size / 1024) result = {"exception": "Timeout"} for retry in range(2): try: with gevent.Timeout(timeout, False): - result = peer.publish(self.address, inner_path, body, content_json_modified, diffs) + result = peer.request("update", { + "site": self.address, + "inner_path": inner_path, + "body": body, + "peer": (config.ip_external, config.fileserver_port) + }) if result: break - except 
Exception as err: - self.log.error("Publish error: %s" % Debug.formatException(err)) + except Exception, err: result = {"exception": Debug.formatException(err)} if result and "ok" in result: published.append(peer) - if cb_progress and len(published) <= limit: - cb_progress(len(published), limit) - self.log.info("[OK] %s: %s %s/%s" % (peer.key, result["ok"], len(published), limit)) + self.log.info("[OK] %s: %s" % (peer.key, result["ok"])) else: if result == {"exception": "Timeout"}: - peer.onConnectionError("Publish timeout") + peer.onConnectionError() self.log.info("[FAILED] %s: %s" % (peer.key, result)) - time.sleep(0.01) # Update content.json on peers @util.Noparallel() - def publish(self, limit="default", inner_path="content.json", diffs={}, cb_progress=None): + def publish(self, limit=5, inner_path="content.json"): published = [] # Successfully published (Peer) publishers = [] # Publisher threads - if not self.peers: - self.announce(mode="more") + connected_peers = self.getConnectedPeers() + if len(connected_peers) > limit * 2: # Publish to already connected peers if possible + peers = connected_peers + else: + peers = self.peers.values() - if limit == "default": - limit = 5 - threads = limit - - peers = self.getConnectedPeers() - num_connected_peers = len(peers) - - random.shuffle(peers) - peers = sorted(peers, key=lambda peer: peer.connection.handshake.get("rev", 0) < config.rev - 100) # Prefer newer clients - - if len(peers) < limit * 2 and len(self.peers) > len(peers): # Add more, non-connected peers if necessary - peers += self.getRecentPeers(limit * 2) - - peers = set(peers) - - self.log.info("Publishing %s to %s/%s peers (connected: %s) diffs: %s (%.2fk)..." % ( - inner_path, limit, len(self.peers), num_connected_peers, list(diffs.keys()), float(len(str(diffs))) / 1024 + self.log.info("Publishing to %s/%s peers (connected: %s)..." 
% ( + min(len(self.peers), limit), len(self.peers), len(connected_peers) )) if not peers: return 0 # No peers found + random.shuffle(peers) event_done = gevent.event.AsyncResult() - for i in range(min(len(peers), limit, threads)): - publisher = gevent.spawn(self.publisher, inner_path, peers, published, limit, diffs, event_done, cb_progress) + for i in range(min(len(self.peers), limit, 5)): # Max 5 thread + publisher = gevent.spawn(self.publisher, inner_path, peers, published, limit, event_done) publishers.append(publisher) event_done.get() # Wait for done @@ -620,14 +364,19 @@ class Site(object): if len(published) == 0: gevent.joinall(publishers) # No successful publish, wait for all publisher - # Publish more peers in the backgroup + # Make sure the connected passive peers got the update + passive_peers = [ + peer for peer in peers + if peer.connection and not peer.connection.closed and peer.key.endswith(":0") and peer not in published + ] # Every connected passive peer that we not published to + self.log.info( - "Published %s to %s peers, publishing to %s more peers in the background" % - (inner_path, len(published), limit) + "Successfuly published to %s peers, publishing to %s more passive peers" % + (len(published), len(passive_peers)) ) - for thread in range(2): - gevent.spawn(self.publisher, inner_path, peers, published, limit=limit * 2, diffs=diffs) + for peer in passive_peers: + gevent.spawn(self.publisher, inner_path, passive_peers, published, limit=10) # Send my hashfield to every connected peer if changed gevent.spawn(self.sendMyHashfield, 100) @@ -635,8 +384,7 @@ class Site(object): return len(published) # Copy this site - @util.Noparallel() - def clone(self, address, privatekey=None, address_index=None, root_inner_path="", overwrite=False): + def clone(self, address, privatekey=None, address_index=None, overwrite=False): import shutil new_site = SiteManager.site_manager.need(address, all_file=False) default_dirs = [] # Dont copy these directories (has 
-default version) @@ -644,26 +392,16 @@ class Site(object): if "-default" in dir_name: default_dirs.append(dir_name.replace("-default", "")) - self.log.debug("Cloning to %s, ignore dirs: %s, root: %s" % (address, default_dirs, root_inner_path)) + self.log.debug("Cloning to %s, ignore dirs: %s" % (address, default_dirs)) # Copy root content.json if not new_site.storage.isFile("content.json") and not overwrite: - # New site: Content.json not exist yet, create a new one from source site - if "size_limit" in self.settings: - new_site.settings["size_limit"] = self.settings["size_limit"] - - # Use content.json-default is specified - if self.storage.isFile(root_inner_path + "/content.json-default"): - content_json = self.storage.loadJson(root_inner_path + "/content.json-default") - else: - content_json = self.storage.loadJson("content.json") - + # Content.json not exist yet, create a new one from source site + content_json = self.storage.loadJson("content.json") if "domain" in content_json: del content_json["domain"] content_json["title"] = "my" + content_json["title"] content_json["cloned_from"] = self.address - content_json["clone_root"] = root_inner_path - content_json["files"] = {} if address_index: content_json["address_index"] = address_index # Site owner's BIP32 index new_site.storage.writeJson("content.json", content_json) @@ -672,46 +410,27 @@ class Site(object): ) # Copy files - for content_inner_path, content in list(self.content_manager.contents.items()): - file_relative_paths = list(content.get("files", {}).keys()) - - # Sign content.json at the end to make sure every file is included - file_relative_paths.sort() - file_relative_paths.sort(key=lambda key: key.replace("-default", "").endswith("content.json")) - - for file_relative_path in file_relative_paths: + for content_inner_path, content in self.content_manager.contents.items(): + for file_relative_path in sorted(content["files"].keys()): file_inner_path = helper.getDirname(content_inner_path) + 
file_relative_path # Relative to content.json file_inner_path = file_inner_path.strip("/") # Strip leading / - if not file_inner_path.startswith(root_inner_path): - self.log.debug("[SKIP] %s (not in clone root)" % file_inner_path) - continue if file_inner_path.split("/")[0] in default_dirs: # Dont copy directories that has -default postfixed alternative self.log.debug("[SKIP] %s (has default alternative)" % file_inner_path) continue file_path = self.storage.getPath(file_inner_path) # Copy the file normally to keep the -default postfixed dir and file to allow cloning later - if root_inner_path: - file_inner_path_dest = re.sub("^%s/" % re.escape(root_inner_path), "", file_inner_path) - file_path_dest = new_site.storage.getPath(file_inner_path_dest) - else: - file_inner_path_dest = file_inner_path - file_path_dest = new_site.storage.getPath(file_inner_path) - + file_path_dest = new_site.storage.getPath(file_inner_path) self.log.debug("[COPY] %s to %s..." % (file_inner_path, file_path_dest)) dest_dir = os.path.dirname(file_path_dest) if not os.path.isdir(dest_dir): os.makedirs(dest_dir) - if file_inner_path_dest.replace("-default", "") == "content.json": # Don't copy root content.json-default - continue - shutil.copy(file_path, file_path_dest) # If -default in path, create a -default less copy of the file - if "-default" in file_inner_path_dest: - file_path_dest = new_site.storage.getPath(file_inner_path_dest.replace("-default", "")) - if new_site.storage.isFile(file_inner_path_dest.replace("-default", "")) and not overwrite: - # Don't overwrite site files with default ones + if "-default" in file_inner_path: + file_path_dest = new_site.storage.getPath(file_inner_path.replace("-default", "")) + if new_site.storage.isFile(file_path_dest) and not overwrite: # Don't overwrite site files with default ones self.log.debug("[SKIP] Default file: %s (already exist)" % file_inner_path) continue self.log.debug("[COPY] Default file: %s to %s..." 
% (file_inner_path, file_path_dest)) @@ -721,19 +440,19 @@ class Site(object): shutil.copy(file_path, file_path_dest) # Sign if content json if file_path_dest.endswith("/content.json"): - new_site.storage.onUpdated(file_inner_path_dest.replace("-default", "")) + new_site.storage.onUpdated(file_inner_path.replace("-default", "")) new_site.content_manager.loadContent( - file_inner_path_dest.replace("-default", ""), add_bad_files=False, + file_inner_path.replace("-default", ""), add_bad_files=False, delete_removed_files=False, load_includes=False ) if privatekey: - new_site.content_manager.sign(file_inner_path_dest.replace("-default", ""), privatekey, remove_missing_optional=True) + new_site.content_manager.sign(file_inner_path.replace("-default", ""), privatekey) new_site.content_manager.loadContent( - file_inner_path_dest, add_bad_files=False, delete_removed_files=False, load_includes=False + file_inner_path, add_bad_files=False, delete_removed_files=False, load_includes=False ) if privatekey: - new_site.content_manager.sign("content.json", privatekey, remove_missing_optional=True) + new_site.content_manager.sign("content.json", privatekey) new_site.content_manager.loadContent( "content.json", add_bad_files=False, delete_removed_files=False, load_includes=False ) @@ -741,286 +460,304 @@ class Site(object): # Rebuild DB if new_site.storage.isFile("dbschema.json"): new_site.storage.closeDb() - try: - new_site.storage.rebuildDb() - except Exception as err: - self.log.error(err) + new_site.storage.rebuildDb() return new_site - @util.Pooled(100) - def pooledNeedFile(self, *args, **kwargs): - return self.needFile(*args, **kwargs) - - def isFileDownloadAllowed(self, inner_path, file_info): - # Verify space for all site - if self.settings["size"] > self.getSizeLimit() * 1024 * 1024: - return False - # Verify space for file - if file_info.get("size", 0) > config.file_size_limit * 1024 * 1024: - self.log.debug( - "File size %s too large: %sMB > %sMB, skipping..." 
% - (inner_path, file_info.get("size", 0) / 1024 / 1024, config.file_size_limit) - ) - return False - else: - return True - - def needFileInfo(self, inner_path): - file_info = self.content_manager.getFileInfo(inner_path) - if not file_info: - # No info for file, download all content.json first - self.log.debug("No info for %s, waiting for all content.json" % inner_path) - success = self.downloadContent("content.json", download_files=False) - if not success: - return False - file_info = self.content_manager.getFileInfo(inner_path) - return file_info - # Check and download if file not exist def needFile(self, inner_path, update=False, blocking=True, peer=None, priority=0): - if self.worker_manager.tasks.findTask(inner_path): - task = self.worker_manager.addTask(inner_path, peer, priority=priority) - if blocking: - return task["evt"].get() - else: - return task["evt"] - elif self.storage.isFile(inner_path) and not update: # File exist, no need to do anything + if self.storage.isFile(inner_path) and not update: # File exist, no need to do anything return True - elif not self.isServing(): # Site not serving + elif self.settings["serving"] is False: # Site not serving return False else: # Wait until file downloaded + self.bad_files[inner_path] = self.bad_files.get(inner_path, 0) + 1 # Mark as bad file if not self.content_manager.contents.get("content.json"): # No content.json, download it first! 
- self.log.debug("Need content.json first (inner_path: %s, priority: %s)" % (inner_path, priority)) - if priority > 0: - gevent.spawn(self.announce) + self.log.debug("Need content.json first") + gevent.spawn(self.announce) if inner_path != "content.json": # Prevent double download task = self.worker_manager.addTask("content.json", peer) - task["evt"].get() + task.get() self.content_manager.loadContent() if not self.content_manager.contents.get("content.json"): return False # Content.json download failed - file_info = None - if not inner_path.endswith("content.json"): - file_info = self.needFileInfo(inner_path) - if not file_info: + if not inner_path.endswith("content.json") and not self.content_manager.getFileInfo(inner_path): + # No info for file, download all content.json first + self.log.debug("No info for %s, waiting for all content.json" % inner_path) + success = self.downloadContent("content.json", download_files=False) + if not success: return False - if "cert_signers" in file_info and not file_info["content_inner_path"] in self.content_manager.contents: - self.log.debug("Missing content.json for requested user file: %s" % inner_path) - if self.bad_files.get(file_info["content_inner_path"], 0) > 5: - self.log.debug("File %s not reachable: retry %s" % ( - inner_path, self.bad_files.get(file_info["content_inner_path"], 0) - )) - return False - self.downloadContent(file_info["content_inner_path"]) + if not self.content_manager.getFileInfo(inner_path): + return False # Still no info for file - if not self.isFileDownloadAllowed(inner_path, file_info): - self.log.debug("%s: Download not allowed" % inner_path) - return False - - self.bad_files[inner_path] = self.bad_files.get(inner_path, 0) + 1 # Mark as bad file - - task = self.worker_manager.addTask(inner_path, peer, priority=priority, file_info=file_info) + task = self.worker_manager.addTask(inner_path, peer, priority=priority) if blocking: - return task["evt"].get() + return task.get() else: - return 
task["evt"] + return task # Add or update a peer to site # return_peer: Always return the peer even if it was already present - def addPeer(self, ip, port, return_peer=False, connection=None, source="other"): - if not ip or ip == "0.0.0.0": + def addPeer(self, ip, port, return_peer=False): + if not ip: return False - + if (ip, port) in self.peer_blacklist: + return False # Ignore blacklist (eg. myself) key = "%s:%s" % (ip, port) - peer = self.peers.get(key) - if peer: # Already has this ip - peer.found(source) + if key in self.peers: # Already has this ip + self.peers[key].found() if return_peer: # Always return peer - return peer + return self.peers[key] else: return False else: # New peer - if (ip, port) in self.peer_blacklist: - return False # Ignore blacklist (eg. myself) peer = Peer(ip, port, self) self.peers[key] = peer - peer.found(source) return peer - def announce(self, *args, **kwargs): - if self.isServing(): - self.announcer.announce(*args, **kwargs) + # Gather peer from connected peers + @util.Noparallel(blocking=False) + def announcePex(self, query_num=2, need_num=5): + peers = [peer for peer in self.peers.values() if peer.connection and peer.connection.connected] # Connected peers + if len(peers) == 0: # Small number of connected peers for this site, connect to any + self.log.debug("Small number of peers detected...query all of peers using pex") + peers = self.peers.values() + need_num = 10 - # Keep connections to get the updates - def needConnections(self, num=None, check_site_on_reconnect=False): - if num is None: - if len(self.peers) < 50: - num = 3 + random.shuffle(peers) + done = 0 + added = 0 + for peer in peers: + if peer.connection: # Has connection + if "port_opened" in peer.connection.handshake: # This field added recently, so probably has has peer exchange + res = peer.pex(need_num=need_num) + else: + res = False + else: # No connection + res = peer.pex(need_num=need_num) + if type(res) == int: # We have result + done += 1 + added += res + 
if res: + self.worker_manager.onPeers() + self.updateWebsocket(peers_added=res) + if done == query_num: + break + self.log.debug("Queried pex from %s peers got %s new peers." % (done, added)) + + # Gather peers from tracker + # Return: Complete time or False on error + def announceTracker(self, protocol, address, fileserver_port, address_hash, my_peer_id): + s = time.time() + if protocol == "udp": # Udp tracker + if config.disable_udp: + return False # No udp supported + ip, port = address.split(":") + tracker = UdpTrackerClient(ip, int(port)) + tracker.peer_port = fileserver_port + try: + tracker.connect() + tracker.poll_once() + tracker.announce(info_hash=address_hash, num_want=50) + back = tracker.poll_once() + peers = back["response"]["peers"] + except Exception, err: + return False + + else: # Http tracker + params = { + 'info_hash': binascii.a2b_hex(address_hash), + 'peer_id': my_peer_id, 'port': fileserver_port, + 'uploaded': 0, 'downloaded': 0, 'left': 0, 'compact': 1, 'numwant': 30, + 'event': 'started' + } + req = None + try: + url = "http://" + address + "?" 
+ urllib.urlencode(params) + # Load url + with gevent.Timeout(30, False): # Make sure of timeout + req = urllib2.urlopen(url, timeout=25) + response = req.read() + req.fp._sock.recv = None # Hacky avoidance of memory leak for older python versions + req.close() + req = None + if not response: + self.log.debug("Http tracker %s response error" % url) + return False + # Decode peers + peer_data = bencode.decode(response)["peers"] + response = None + peer_count = len(peer_data) / 6 + peers = [] + for peer_offset in xrange(peer_count): + off = 6 * peer_offset + peer = peer_data[off:off + 6] + addr, port = struct.unpack('!LH', peer) + peers.append({"addr": socket.inet_ntoa(struct.pack('!L', addr)), "port": port}) + except Exception, err: + self.log.debug("Http tracker %s error: %s" % (url, err)) + if req: + req.close() + req = None + return False + + # Adding peers + added = 0 + for peer in peers: + if not peer["port"]: + continue # Dont add peers with port 0 + if self.addPeer(peer["addr"], peer["port"]): + added += 1 + if added: + self.worker_manager.onPeers() + self.updateWebsocket(peers_added=added) + self.log.debug("Found %s peers, new: %s" % (len(peers), added)) + return time.time() - s + + # Add myself and get other peers from tracker + def announce(self, force=False, num=5, pex=True): + if time.time() < self.time_announce + 30 and not force: + return # No reannouncing within 30 secs + self.time_announce = time.time() + + if config.disable_udp: + trackers = [tracker for tracker in config.trackers if not tracker.startswith("udp://")] + else: + trackers = config.trackers + if num == 1: # Only announce on one tracker, increment the queried tracker id + self.last_tracker_id += 1 + self.last_tracker_id = self.last_tracker_id % len(trackers) + trackers = [trackers[self.last_tracker_id]] # We only going to use this one + + errors = [] + slow = [] + address_hash = hashlib.sha1(self.address).hexdigest() # Site address hash + my_peer_id = 
sys.modules["main"].file_server.peer_id + + if sys.modules["main"].file_server.port_opened: + fileserver_port = config.fileserver_port + else: # Port not opened, report port 0 + fileserver_port = 0 + + s = time.time() + announced = 0 + threads = [] + + for tracker in trackers: # Start announce threads + protocol, address = tracker.split("://") + thread = gevent.spawn(self.announceTracker, protocol, address, fileserver_port, address_hash, my_peer_id) + threads.append(thread) + thread.address = address + thread.protocol = protocol + if len(threads) > num: # Announce limit + break + + gevent.joinall(threads, timeout=10) # Wait for announce finish + + for thread in threads: + if thread.value: + if thread.value > 1: + slow.append("%.2fs %s://%s" % (thread.value, thread.protocol, thread.address)) + announced += 1 else: - num = 6 - need = min(len(self.peers), num, config.connected_limit) # Need 5 peer, but max total peers + if thread.ready(): + errors.append("%s://%s" % (thread.protocol, thread.address)) + else: # Still running + slow.append("10s+ %s://%s" % (thread.protocol, thread.address)) - connected = len(self.getConnectedPeers()) + # Save peers num + self.settings["peers"] = len(self.peers) + self.saveSettings() - connected_before = connected + if len(errors) < min(num, len(trackers)): # Less errors than total tracker nums + self.log.debug( + "Announced port %s to %s trackers in %.3fs, errors: %s, slow: %s" % + (fileserver_port, announced, time.time() - s, errors, slow) + ) + else: + if num > 1: + self.log.error("Announce to %s trackers in %.3fs, failed" % (announced, time.time() - s)) + + if pex: + if not [peer for peer in self.peers.values() if peer.connection and peer.connection.connected]: + # If no connected peer yet then wait for connections + gevent.spawn_later(3, self.announcePex, need_num=10) # Spawn 3 secs later + else: # Else announce immediately + self.announcePex() + + # Keep connections to get the updates (required for passive clients) + def 
needConnections(self, num=3): + need = min(len(self.peers), num) # Need 3 peer, but max total peers + + connected = 0 + for peer in self.peers.values(): # Check current connected number + if peer.connection and peer.connection.connected: + connected += 1 self.log.debug("Need connections: %s, Current: %s, Total: %s" % (need, connected, len(self.peers))) if connected < need: # Need more than we have - for peer in self.getRecentPeers(30): + for peer in self.peers.values(): if not peer.connection or not peer.connection.connected: # No peer connection or disconnected peer.pex() # Initiate peer exchange if peer.connection and peer.connection.connected: connected += 1 # Successfully connected if connected >= need: break - self.log.debug( - "Connected before: %s, after: %s. Check site: %s." % - (connected_before, connected, check_site_on_reconnect) - ) - - if check_site_on_reconnect and connected_before == 0 and connected > 0 and self.connection_server.has_internet: - gevent.spawn(self.update, check_files=False) - return connected - # Return: Probably peers verified to be connectable recently - def getConnectablePeers(self, need_num=5, ignore=[], allow_private=True): - peers = list(self.peers.values()) + # Return: Probably working, connectable Peers + def getConnectablePeers(self, need_num=5, ignore=[]): + peers = self.peers.values() + random.shuffle(peers) found = [] for peer in peers: if peer.key.endswith(":0"): continue # Not connectable if not peer.connection: continue # No connection - if peer.ip.endswith(".onion") and not self.connection_server.tor_manager.enabled: - continue # Onion not supported if peer.key in ignore: continue # The requester has this peer if time.time() - peer.connection.last_recv_time > 60 * 60 * 2: # Last message more than 2 hours ago peer.connection = None # Cleanup: Dead connection continue - if not allow_private and helper.isPrivateIp(peer.ip): - continue found.append(peer) if len(found) >= need_num: break # Found requested number of peers - 
if len(found) < need_num: # Return not that good peers - found += [ - peer for peer in peers - if not peer.key.endswith(":0") and - peer.key not in ignore and - (allow_private or not helper.isPrivateIp(peer.ip)) - ][0:need_num - len(found)] + if (not found and not ignore) or (need_num > 5 and need_num < 100 and len(found) < need_num): + # Return not that good peers: Not found any peer and the requester dont have any or cant give enough peer + found = [peer for peer in peers if not peer.key.endswith(":0") and peer.key not in ignore][0:need_num - len(found)] return found - # Return: Recently found peers - def getRecentPeers(self, need_num): - found = list(set(self.peers_recent)) - self.log.debug( - "Recent peers %s of %s (need: %s)" % - (len(found), len(self.peers), need_num) - ) - - if len(found) >= need_num or len(found) >= len(self.peers): - return sorted( - found, - key=lambda peer: peer.reputation, - reverse=True - )[0:need_num] - - # Add random peers - need_more = need_num - len(found) - if not self.connection_server.tor_manager.enabled: - peers = [peer for peer in self.peers.values() if not peer.ip.endswith(".onion")] - else: - peers = list(self.peers.values()) - - found_more = sorted( - peers[0:need_more * 50], - key=lambda peer: peer.reputation, - reverse=True - )[0:need_more * 2] - - found += found_more - - return found[0:need_num] - def getConnectedPeers(self): - back = [] - if not self.connection_server: - return [] + return [peer for peer in self.peers.values() if peer.connection and peer.connection.connected] - tor_manager = self.connection_server.tor_manager - for connection in self.connection_server.connections: - if not connection.connected and time.time() - connection.start_time > 20: # Still not connected after 20s + # Cleanup probably dead peers + def cleanupPeers(self): + peers = self.peers.values() + if len(peers) < 20: + return False + removed = 0 + + for peer in peers: + if peer.connection and peer.connection.connected: continue - peer = 
self.peers.get("%s:%s" % (connection.ip, connection.port)) - if peer: - if connection.ip.endswith(".onion") and connection.target_onion and tor_manager.start_onions: - # Check if the connection is made with the onion address created for the site - valid_target_onions = (tor_manager.getOnion(self.address), tor_manager.getOnion("global")) - if connection.target_onion not in valid_target_onions: - continue - if not peer.connection: - peer.connect(connection) - back.append(peer) - return back + if peer.connection and not peer.connection.connected: + peer.connection = None # Dead connection + if time.time() - peer.time_found > 60 * 60 * 4: # Not found on tracker or via pex in last 4 hour + peer.remove() + removed += 1 + if removed > 5: # Don't remove too much at once + break - # Cleanup probably dead peers and close connection if too much - def cleanupPeers(self, peers_protected=[]): - peers = list(self.peers.values()) - if len(peers) > 20: - # Cleanup old peers - removed = 0 - if len(peers) > 1000: - ttl = 60 * 60 * 1 - else: - ttl = 60 * 60 * 4 - - for peer in peers: - if peer.connection and peer.connection.connected: - continue - if peer.connection and not peer.connection.connected: - peer.connection = None # Dead connection - if time.time() - peer.time_found > ttl: # Not found on tracker or via pex in last 4 hour - peer.remove("Time found expired") - removed += 1 - if removed > len(peers) * 0.1: # Don't remove too much at once - break - - if removed: - self.log.debug("Cleanup peers result: Removed %s, left: %s" % (removed, len(self.peers))) - - # Close peers over the limit - closed = 0 - connected_peers = [peer for peer in self.getConnectedPeers() if peer.connection.connected] # Only fully connected peers - need_to_close = len(connected_peers) - config.connected_limit - - if closed < need_to_close: - # Try to keep connections with more sites - for peer in sorted(connected_peers, key=lambda peer: min(peer.connection.sites, 5)): - if not peer.connection: - continue - 
if peer.key in peers_protected: - continue - if peer.connection.sites > 5: - break - peer.connection.close("Cleanup peers") - peer.connection = None - closed += 1 - if closed >= need_to_close: - break - - if need_to_close > 0: - self.log.debug("Connected: %s, Need to close: %s, Closed: %s" % (len(connected_peers), need_to_close, closed)) + if removed: + self.log.debug("Cleanup peers result: Removed %s, left: %s" % (removed, len(self.peers))) # Send hashfield to peers - def sendMyHashfield(self, limit=5): + def sendMyHashfield(self, limit=3): if not self.content_manager.hashfield: # No optional files return False @@ -1032,17 +769,15 @@ class Site(object): if sent >= limit: break if sent: - my_hashfield_changed = self.content_manager.hashfield.time_changed - self.log.debug("Sent my hashfield (chaged %.3fs ago) to %s peers" % (time.time() - my_hashfield_changed, sent)) + self.log.debug("Sent my hashfield to %s peers" % sent) return sent # Update hashfield - def updateHashfield(self, limit=5): + def updateHashfield(self, limit=3): # Return if no optional files - if not self.content_manager.hashfield and not self.content_manager.has_optional_files: + if not self.content_manager.hashfield and not self.content_manager.contents.get("content.json", {}).get("files_optional"): return False - s = time.time() queried = 0 connected_peers = self.getConnectedPeers() for peer in connected_peers: @@ -1053,31 +788,9 @@ class Site(object): if queried >= limit: break if queried: - self.log.debug("Queried hashfield from %s peers in %.3fs" % (queried, time.time() - s)) + self.log.debug("Queried hashfield from %s peers" % queried) return queried - # Returns if the optional file is need to be downloaded or not - def isDownloadable(self, inner_path): - return self.settings.get("autodownloadoptional") - - def delete(self): - self.log.info("Deleting site...") - s = time.time() - self.settings["serving"] = False - self.settings["deleting"] = True - self.saveSettings() - num_greenlets = 
self.greenlet_manager.stopGreenlets("Site %s deleted" % self.address) - self.worker_manager.running = False - num_workers = self.worker_manager.stopWorkers() - SiteManager.site_manager.delete(self.address) - self.content_manager.contents.db.deleteSite(self) - self.updateWebsocket(deleted=True) - self.storage.deleteFiles() - self.log.info( - "Deleted site in %.3fs (greenlets: %s, workers: %s)" % - (time.time() - s, num_greenlets, num_workers) - ) - # - Events - # Add event listeners @@ -1094,19 +807,12 @@ class Site(object): # Send site status update to websocket clients def updateWebsocket(self, **kwargs): if kwargs: - param = {"event": list(kwargs.items())[0]} + param = {"event": kwargs.items()[0]} else: param = None for ws in self.websockets: ws.event("siteChanged", self, param) - def messageWebsocket(self, message, type="info", progress=None): - for ws in self.websockets: - if progress is None: - ws.cmd("notification", [type, message]) - else: - ws.cmd("progress", [type, message, progress]) - # File download started @util.Noparallel(blocking=False) def fileStarted(self): @@ -1117,14 +823,11 @@ class Site(object): def fileDone(self, inner_path): # File downloaded, remove it from bad files if inner_path in self.bad_files: - if config.verbose: - self.log.debug("Bad file solved: %s" % inner_path) + self.log.debug("Bad file solved: %s" % inner_path) del(self.bad_files[inner_path]) # Update content.json last downlad time if inner_path == "content.json": - if not self.settings.get("downloaded"): - self.settings["downloaded"] = int(time.time()) self.content_updated = time.time() self.updateWebsocket(file_done=inner_path) @@ -1134,14 +837,7 @@ class Site(object): if inner_path == "content.json": self.content_updated = False self.log.debug("Can't update content.json") - if inner_path in self.bad_files and self.connection_server.has_internet: + if inner_path in self.bad_files: self.bad_files[inner_path] = self.bad_files.get(inner_path, 0) + 1 
self.updateWebsocket(file_failed=inner_path) - - if self.bad_files.get(inner_path, 0) > 30: - self.fileForgot(inner_path) - - def fileForgot(self, inner_path): - self.log.debug("Giving up on %s" % inner_path) - del self.bad_files[inner_path] # Give up after 30 tries diff --git a/src/Site/SiteAnnouncer.py b/src/Site/SiteAnnouncer.py deleted file mode 100644 index 2fd63e82..00000000 --- a/src/Site/SiteAnnouncer.py +++ /dev/null @@ -1,293 +0,0 @@ -import random -import time -import hashlib -import re -import collections - -import gevent - -from Plugin import PluginManager -from Config import config -from Debug import Debug -from util import helper -from greenlet import GreenletExit -import util - - -class AnnounceError(Exception): - pass - -global_stats = collections.defaultdict(lambda: collections.defaultdict(int)) - - -@PluginManager.acceptPlugins -class SiteAnnouncer(object): - def __init__(self, site): - self.site = site - self.stats = {} - self.fileserver_port = config.fileserver_port - self.peer_id = self.site.connection_server.peer_id - self.last_tracker_id = random.randint(0, 10) - self.time_last_announce = 0 - - def getTrackers(self): - return config.trackers - - def getSupportedTrackers(self): - trackers = self.getTrackers() - - if not self.site.connection_server.tor_manager.enabled: - trackers = [tracker for tracker in trackers if ".onion" not in tracker] - - trackers = [tracker for tracker in trackers if self.getAddressParts(tracker)] # Remove trackers with unknown address - - if "ipv6" not in self.site.connection_server.supported_ip_types: - trackers = [tracker for tracker in trackers if helper.getIpType(self.getAddressParts(tracker)["ip"]) != "ipv6"] - - return trackers - - def getAnnouncingTrackers(self, mode): - trackers = self.getSupportedTrackers() - - if trackers and (mode == "update" or mode == "more"): # Only announce on one tracker, increment the queried tracker id - self.last_tracker_id += 1 - self.last_tracker_id = self.last_tracker_id % 
len(trackers) - trackers_announcing = [trackers[self.last_tracker_id]] # We only going to use this one - else: - trackers_announcing = trackers - - return trackers_announcing - - def getOpenedServiceTypes(self): - back = [] - # Type of addresses they can reach me - if config.trackers_proxy == "disable" and config.tor != "always": - for ip_type, opened in list(self.site.connection_server.port_opened.items()): - if opened: - back.append(ip_type) - if self.site.connection_server.tor_manager.start_onions: - back.append("onion") - return back - - @util.Noparallel(blocking=False) - def announce(self, force=False, mode="start", pex=True): - if time.time() - self.time_last_announce < 30 and not force: - return # No reannouncing within 30 secs - if force: - self.site.log.debug("Force reannounce in mode %s" % mode) - - self.fileserver_port = config.fileserver_port - self.time_last_announce = time.time() - - trackers = self.getAnnouncingTrackers(mode) - - if config.verbose: - self.site.log.debug("Tracker announcing, trackers: %s" % trackers) - - errors = [] - slow = [] - s = time.time() - threads = [] - num_announced = 0 - - for tracker in trackers: # Start announce threads - tracker_stats = global_stats[tracker] - # Reduce the announce time for trackers that looks unreliable - time_announce_allowed = time.time() - 60 * min(30, tracker_stats["num_error"]) - if tracker_stats["num_error"] > 5 and tracker_stats["time_request"] > time_announce_allowed and not force: - if config.verbose: - self.site.log.debug("Tracker %s looks unreliable, announce skipped (error: %s)" % (tracker, tracker_stats["num_error"])) - continue - thread = self.site.greenlet_manager.spawn(self.announceTracker, tracker, mode=mode) - threads.append(thread) - thread.tracker = tracker - - time.sleep(0.01) - self.updateWebsocket(trackers="announcing") - - gevent.joinall(threads, timeout=20) # Wait for announce finish - - for thread in threads: - if thread.value is None: - continue - if thread.value is not False: 
- if thread.value > 1.0: # Takes more than 1 second to announce - slow.append("%.2fs %s" % (thread.value, thread.tracker)) - num_announced += 1 - else: - if thread.ready(): - errors.append(thread.tracker) - else: # Still running - slow.append("30s+ %s" % thread.tracker) - - # Save peers num - self.site.settings["peers"] = len(self.site.peers) - - if len(errors) < len(threads): # At least one tracker finished - if len(trackers) == 1: - announced_to = trackers[0] - else: - announced_to = "%s/%s trackers" % (num_announced, len(threads)) - if mode != "update" or config.verbose: - self.site.log.debug( - "Announced in mode %s to %s in %.3fs, errors: %s, slow: %s" % - (mode, announced_to, time.time() - s, errors, slow) - ) - else: - if len(threads) > 1: - self.site.log.error("Announce to %s trackers in %.3fs, failed" % (len(threads), time.time() - s)) - if len(threads) == 1 and mode != "start": # Move to next tracker - self.site.log.debug("Tracker failed, skipping to next one...") - self.site.greenlet_manager.spawnLater(1.0, self.announce, force=force, mode=mode, pex=pex) - - self.updateWebsocket(trackers="announced") - - if pex: - self.updateWebsocket(pex="announcing") - if mode == "more": # Need more peers - self.announcePex(need_num=10) - else: - self.announcePex() - - self.updateWebsocket(pex="announced") - - def getTrackerHandler(self, protocol): - return None - - def getAddressParts(self, tracker): - if "://" not in tracker or not re.match("^[A-Za-z0-9:/\\.#-]+$", tracker): - return None - protocol, address = tracker.split("://", 1) - if ":" in address: - ip, port = address.rsplit(":", 1) - else: - ip = address - if protocol.startswith("https"): - port = 443 - else: - port = 80 - back = {} - back["protocol"] = protocol - back["address"] = address - back["ip"] = ip - back["port"] = port - return back - - def announceTracker(self, tracker, mode="start", num_want=10): - s = time.time() - address_parts = self.getAddressParts(tracker) - if not address_parts: - 
self.site.log.warning("Tracker %s error: Invalid address" % tracker) - return False - - if tracker not in self.stats: - self.stats[tracker] = {"status": "", "num_request": 0, "num_success": 0, "num_error": 0, "time_request": 0, "time_last_error": 0} - - last_status = self.stats[tracker]["status"] - self.stats[tracker]["status"] = "announcing" - self.stats[tracker]["time_request"] = time.time() - global_stats[tracker]["time_request"] = time.time() - if config.verbose: - self.site.log.debug("Tracker announcing to %s (mode: %s)" % (tracker, mode)) - if mode == "update": - num_want = 10 - else: - num_want = 30 - - handler = self.getTrackerHandler(address_parts["protocol"]) - error = None - try: - if handler: - peers = handler(address_parts["address"], mode=mode, num_want=num_want) - else: - raise AnnounceError("Unknown protocol: %s" % address_parts["protocol"]) - except Exception as err: - self.site.log.warning("Tracker %s announce failed: %s in mode %s" % (tracker, Debug.formatException(err), mode)) - error = err - - if error: - self.stats[tracker]["status"] = "error" - self.stats[tracker]["time_status"] = time.time() - self.stats[tracker]["last_error"] = str(error) - self.stats[tracker]["time_last_error"] = time.time() - if self.site.connection_server.has_internet: - self.stats[tracker]["num_error"] += 1 - self.stats[tracker]["num_request"] += 1 - global_stats[tracker]["num_request"] += 1 - if self.site.connection_server.has_internet: - global_stats[tracker]["num_error"] += 1 - self.updateWebsocket(tracker="error") - return False - - if peers is None: # Announce skipped - self.stats[tracker]["time_status"] = time.time() - self.stats[tracker]["status"] = last_status - return None - - self.stats[tracker]["status"] = "announced" - self.stats[tracker]["time_status"] = time.time() - self.stats[tracker]["num_success"] += 1 - self.stats[tracker]["num_request"] += 1 - global_stats[tracker]["num_request"] += 1 - global_stats[tracker]["num_error"] = 0 - - if peers is True: # 
Announce success, but no peers returned - return time.time() - s - - # Adding peers - added = 0 - for peer in peers: - if peer["port"] == 1: # Some trackers does not accept port 0, so we send port 1 as not-connectable - peer["port"] = 0 - if not peer["port"]: - continue # Dont add peers with port 0 - if self.site.addPeer(peer["addr"], peer["port"], source="tracker"): - added += 1 - - if added: - self.site.worker_manager.onPeers() - self.site.updateWebsocket(peers_added=added) - - if config.verbose: - self.site.log.debug( - "Tracker result: %s://%s (found %s peers, new: %s, total: %s)" % - (address_parts["protocol"], address_parts["address"], len(peers), added, len(self.site.peers)) - ) - return time.time() - s - - @util.Noparallel(blocking=False) - def announcePex(self, query_num=2, need_num=5): - peers = self.site.getConnectedPeers() - if len(peers) == 0: # Wait 3s for connections - time.sleep(3) - peers = self.site.getConnectedPeers() - - if len(peers) == 0: # Small number of connected peers for this site, connect to any - peers = list(self.site.getRecentPeers(20)) - need_num = 10 - - random.shuffle(peers) - done = 0 - total_added = 0 - for peer in peers: - num_added = peer.pex(need_num=need_num) - if num_added is not False: - done += 1 - total_added += num_added - if num_added: - self.site.worker_manager.onPeers() - self.site.updateWebsocket(peers_added=num_added) - else: - time.sleep(0.1) - if done == query_num: - break - self.site.log.debug("Pex result: from %s peers got %s new peers." 
% (done, total_added)) - - def updateWebsocket(self, **kwargs): - if kwargs: - param = {"event": list(kwargs.items())[0]} - else: - param = None - - for ws in self.site.websockets: - ws.event("announcerChanged", self.site, param) diff --git a/src/Site/SiteManager.py b/src/Site/SiteManager.py index 684d69fc..3890f8b1 100644 --- a/src/Site/SiteManager.py +++ b/src/Site/SiteManager.py @@ -2,225 +2,87 @@ import json import logging import re import os -import time -import atexit -import gevent - -import util from Plugin import PluginManager -from Content import ContentDb from Config import config from util import helper -from util import RateLimit -from util import Cached - @PluginManager.acceptPlugins class SiteManager(object): + def __init__(self): - self.log = logging.getLogger("SiteManager") - self.log.debug("SiteManager created.") - self.sites = {} - self.sites_changed = int(time.time()) - self.loaded = False - gevent.spawn(self.saveTimer) - atexit.register(lambda: self.save(recalculate_size=True)) + self.sites = None # Load all sites from data/sites.json - @util.Noparallel() - def load(self, cleanup=True, startup=False): - from Debug import Debug - self.log.info("Loading sites... 
(cleanup: %s, startup: %s)" % (cleanup, startup)) - self.loaded = False - from .Site import Site + def load(self): + from Site import Site + if not self.sites: + self.sites = {} address_found = [] added = 0 - load_s = time.time() # Load new adresses - try: - json_path = "%s/sites.json" % config.data_dir - data = json.load(open(json_path)) - except Exception as err: - raise Exception("Unable to load %s: %s" % (json_path, err)) - - sites_need = [] - - for address, settings in data.items(): - if address not in self.sites: - if os.path.isfile("%s/%s/content.json" % (config.data_dir, address)): - # Root content.json exists, try load site - s = time.time() - try: - site = Site(address, settings=settings) - site.content_manager.contents.get("content.json") - except Exception as err: - self.log.debug("Error loading site %s: %s" % (address, err)) - continue - self.sites[address] = site - self.log.debug("Loaded site %s in %.3fs" % (address, time.time() - s)) - added += 1 - elif startup: - # No site directory, start download - self.log.debug("Found new site in sites.json: %s" % address) - sites_need.append([address, settings]) - added += 1 - + for address in json.load(open("%s/sites.json" % config.data_dir)): + if address not in self.sites and os.path.isfile("%s/%s/content.json" % (config.data_dir, address)): + self.sites[address] = Site(address) + added += 1 address_found.append(address) # Remove deleted adresses - if cleanup: - for address in list(self.sites.keys()): - if address not in address_found: - del(self.sites[address]) - self.log.debug("Removed site: %s" % address) + for address in self.sites.keys(): + if address not in address_found: + del(self.sites[address]) + logging.debug("Removed site: %s" % address) - # Remove orpan sites from contentdb - content_db = ContentDb.getContentDb() - for row in content_db.execute("SELECT * FROM site").fetchall(): - address = row["address"] - if address not in self.sites and address not in address_found: - self.log.info("Deleting 
orphan site from content.db: %s" % address) - - try: - content_db.execute("DELETE FROM site WHERE ?", {"address": address}) - except Exception as err: - self.log.error("Can't delete site %s from content_db: %s" % (address, err)) - - if address in content_db.site_ids: - del content_db.site_ids[address] - if address in content_db.sites: - del content_db.sites[address] - - self.loaded = True - for address, settings in sites_need: - gevent.spawn(self.need, address, settings=settings) if added: - self.log.info("Added %s sites in %.3fs" % (added, time.time() - load_s)) - - def saveDelayed(self): - RateLimit.callAsync("Save sites.json", allowed_again=5, func=self.save) - - def save(self, recalculate_size=False): - if not self.sites: - self.log.debug("Save skipped: No sites found") - return - if not self.loaded: - self.log.debug("Save skipped: Not loaded") - return - s = time.time() - data = {} - # Generate data file - s = time.time() - for address, site in list(self.list().items()): - if recalculate_size: - site.settings["size"], site.settings["size_optional"] = site.content_manager.getTotalSize() # Update site size - data[address] = site.settings - data[address]["cache"] = site.getSettingsCache() - time_generate = time.time() - s - - s = time.time() - if data: - helper.atomicWrite("%s/sites.json" % config.data_dir, helper.jsonDumps(data).encode("utf8")) - else: - self.log.debug("Save error: No data") - time_write = time.time() - s - - # Remove cache from site settings - for address, site in self.list().items(): - site.settings["cache"] = {} - - self.log.debug("Saved sites in %.2fs (generate: %.2fs, write: %.2fs)" % (time.time() - s, time_generate, time_write)) - - def saveTimer(self): - while 1: - time.sleep(60 * 10) - self.save(recalculate_size=True) + logging.debug("SiteManager added %s sites" % added) # Checks if its a valid address def isAddress(self, address): return re.match("^[A-Za-z0-9]{26,35}$", address) - def isDomain(self, address): - return False - - 
@Cached(timeout=10) - def isDomainCached(self, address): - return self.isDomain(address) - - def resolveDomain(self, domain): - return False - - @Cached(timeout=10) - def resolveDomainCached(self, domain): - return self.resolveDomain(domain) - # Return: Site object or None if not found def get(self, address): - if self.isDomainCached(address): - address_resolved = self.resolveDomainCached(address) - if address_resolved: - address = address_resolved - - if not self.loaded: # Not loaded yet - self.log.debug("Loading site: %s)..." % address) + if self.sites is None: # Not loaded yet self.load() - site = self.sites.get(address) - - return site - - def add(self, address, all_file=True, settings=None, **kwargs): - from .Site import Site - self.sites_changed = int(time.time()) - # Try to find site with differect case - for recover_address, recover_site in list(self.sites.items()): - if recover_address.lower() == address.lower(): - return recover_site - - if not self.isAddress(address): - return False # Not address: %s % address - self.log.debug("Added new site: %s" % address) - config.loadTrackersFile() - site = Site(address, settings=settings) - self.sites[address] = site - if not site.settings["serving"]: # Maybe it was deleted before - site.settings["serving"] = True - site.saveSettings() - if all_file: # Also download user files on first sync - site.download(check_size=True, blind_includes=True) - return site + return self.sites.get(address) # Return or create site and start download site files - def need(self, address, *args, **kwargs): - if self.isDomainCached(address): - address_resolved = self.resolveDomainCached(address) - if address_resolved: - address = address_resolved - + def need(self, address, all_file=True): + from Site import Site site = self.get(address) if not site: # Site not exist yet - site = self.add(address, *args, **kwargs) + if not self.isAddress(address): + return False # Not address: %s % address + logging.debug("Added new site: %s" % address) 
+ site = Site(address) + self.sites[address] = site + if not site.settings["serving"]: # Maybe it was deleted before + site.settings["serving"] = True + site.saveSettings() + if all_file: # Also download user files on first sync + site.download(blind_includes=True) + else: + if all_file: + site.download() + return site def delete(self, address): - self.sites_changed = int(time.time()) - self.log.debug("Deleted site: %s" % address) + logging.debug("SiteManager deleted site: %s" % address) del(self.sites[address]) # Delete from sites.json - self.save() + sites_settings = json.load(open("%s/sites.json" % config.data_dir)) + del(sites_settings[address]) + helper.atomicWrite("%s/sites.json" % config.data_dir, json.dumps(sites_settings, indent=2, sort_keys=True)) # Lazy load sites def list(self): - if not self.loaded: # Not loaded yet - self.log.debug("Sites not loaded yet...") - self.load(startup=True) + logging.debug("Loading sites...") + if self.sites is None: # Not loaded yet + self.load() return self.sites site_manager = SiteManager() # Singletone -if config.action == "main": # Don't connect / add myself to peerlist - peer_blacklist = [("127.0.0.1", config.fileserver_port), ("::1", config.fileserver_port)] -else: - peer_blacklist = [] - +peer_blacklist = [] # Dont download from this peers diff --git a/src/Site/SiteStorage.py b/src/Site/SiteStorage.py index 27032e79..6edd77dc 100644 --- a/src/Site/SiteStorage.py +++ b/src/Site/SiteStorage.py @@ -3,34 +3,23 @@ import re import shutil import json import time -import errno -from collections import defaultdict +import sys import sqlite3 import gevent.event -import util -from util import SafeRe -from Db.Db import Db +from Db import Db from Debug import Debug from Config import config from util import helper -from util import ThreadPool -from Plugin import PluginManager -from Translate import translate as _ -thread_pool_fs_read = ThreadPool.ThreadPool(config.threads_fs_read, name="FS read") -thread_pool_fs_write = 
ThreadPool.ThreadPool(config.threads_fs_write, name="FS write") -thread_pool_fs_batch = ThreadPool.ThreadPool(1, name="FS batch") +class SiteStorage: - -@PluginManager.acceptPlugins -class SiteStorage(object): def __init__(self, site, allow_create=True): self.site = site self.directory = "%s/%s" % (config.data_dir, self.site.address) # Site data diretory - self.allowed_dir = os.path.abspath(self.directory) # Only serve file within this dir + self.allowed_dir = os.path.abspath(self.directory.decode(sys.getfilesystemencoding())) # Only serve/modify file within this dir self.log = site.log self.db = None # Db class self.db_checked = False # Checked db tables since startup @@ -43,350 +32,189 @@ class SiteStorage(object): else: raise Exception("Directory not exists: %s" % self.directory) - def getDbFile(self): - if self.db: - return self.db.schema["db_file"] - else: - if self.isFile("dbschema.json"): - schema = self.loadJson("dbschema.json") - return schema["db_file"] - else: - return False - - # Create new databaseobject with the site's schema - def openDb(self, close_idle=False): - schema = self.getDbSchema() + # Load db from dbschema.json + def openDb(self, check=True): + schema = self.loadJson("dbschema.json") db_path = self.getPath(schema["db_file"]) - return Db(schema, db_path, close_idle=close_idle) + if check: + if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0: # Not exist or null + self.rebuildDb() - def closeDb(self, reason="Unknown (SiteStorage)"): + if not self.db: + self.db = Db(schema, db_path) + + if check and not self.db_checked: + changed_tables = self.db.checkTables() + if changed_tables: + self.rebuildDb(delete_db=False) # TODO: only update the changed table datas + + def closeDb(self): if self.db: - self.db.close(reason) + self.db.close() self.event_db_busy = None self.db = None - def getDbSchema(self): - try: - self.site.needFile("dbschema.json") - schema = self.loadJson("dbschema.json") - except Exception as err: - raise 
Exception("dbschema.json is not a valid JSON: %s" % err) - return schema - - def loadDb(self): - self.log.debug("No database, waiting for dbschema.json...") - self.site.needFile("dbschema.json", priority=3) - self.log.debug("Got dbschema.json") - self.has_db = self.isFile("dbschema.json") # Recheck if dbschema exist - if self.has_db: - schema = self.getDbSchema() - db_path = self.getPath(schema["db_file"]) - if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0: - try: - self.rebuildDb(reason="Missing database") - except Exception as err: - self.log.error(err) - pass - - if self.db: - self.db.close("Gettig new db for SiteStorage") - self.db = self.openDb(close_idle=True) - try: - changed_tables = self.db.checkTables() - if changed_tables: - self.rebuildDb(delete_db=False, reason="Changed tables") # TODO: only update the changed table datas - except sqlite3.OperationalError: - pass - # Return db class - @util.Noparallel() def getDb(self): - if self.event_db_busy: # Db not ready for queries - self.log.debug("Wating for db...") - self.event_db_busy.get() # Wait for event if not self.db: - self.loadDb() + self.log.debug("No database, waiting for dbschema.json...") + self.site.needFile("dbschema.json", priority=3) + self.has_db = self.isFile("dbschema.json") # Recheck if dbschema exist + if self.has_db: + self.openDb() return self.db - def updateDbFile(self, inner_path, file=None, cur=None): - path = self.getPath(inner_path) - if cur: - db = cur.db - else: - db = self.getDb() - return db.updateJson(path, file, cur) - - # Return possible db files for the site - @thread_pool_fs_read.wrap - def getDbFiles(self): - found = 0 - for content_inner_path, content in self.site.content_manager.contents.items(): - # content.json file itself - if self.isFile(content_inner_path): - yield content_inner_path, self.getPath(content_inner_path) - else: - self.log.debug("[MISSING] %s" % content_inner_path) - # Data files in content.json - content_inner_path_dir = 
helper.getDirname(content_inner_path) # Content.json dir relative to site - for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()): - if not file_relative_path.endswith(".json") and not file_relative_path.endswith("json.gz"): - continue # We only interesed in json files - file_inner_path = content_inner_path_dir + file_relative_path # File Relative to site dir - file_inner_path = file_inner_path.strip("/") # Strip leading / - if self.isFile(file_inner_path): - yield file_inner_path, self.getPath(file_inner_path) - else: - self.log.debug("[MISSING] %s" % file_inner_path) - found += 1 - if found % 100 == 0: - time.sleep(0.001) # Context switch to avoid UI block - # Rebuild sql cache - @util.Noparallel() - @thread_pool_fs_batch.wrap - def rebuildDb(self, delete_db=True, reason="Unknown"): - self.log.info("Rebuilding db (reason: %s)..." % reason) + def rebuildDb(self, delete_db=True): self.has_db = self.isFile("dbschema.json") if not self.has_db: return False - + self.event_db_busy = gevent.event.AsyncResult() schema = self.loadJson("dbschema.json") db_path = self.getPath(schema["db_file"]) if os.path.isfile(db_path) and delete_db: if self.db: - self.closeDb("rebuilding") # Close db if open - time.sleep(0.5) + self.db.close() # Close db if open self.log.info("Deleting %s" % db_path) try: os.unlink(db_path) - except Exception as err: + except Exception, err: self.log.error("Delete error: %s" % err) - - if not self.db: - self.db = self.openDb() - self.event_db_busy = gevent.event.AsyncResult() - - self.log.info("Rebuild: Creating tables...") - - # raise DbTableError if not valid + self.openDb(check=False) + self.log.info("Creating tables...") self.db.checkTables() - + self.log.info("Importing data...") cur = self.db.getCursor() + cur.execute("BEGIN") cur.logging = False + found = 0 s = time.time() - self.log.info("Rebuild: Getting db files...") - db_files = list(self.getDbFiles()) - num_imported = 0 - num_total = 
len(db_files) - num_error = 0 - - self.log.info("Rebuild: Importing data...") - try: - if num_total > 100: - self.site.messageWebsocket( - _["Database rebuilding...
    Imported {0} of {1} files (error: {2})..."].format( - "0000", num_total, num_error - ), "rebuild", 0 - ) - for file_inner_path, file_path in db_files: - try: - if self.updateDbFile(file_inner_path, file=open(file_path, "rb"), cur=cur): - num_imported += 1 - except Exception as err: - self.log.error("Error importing %s: %s" % (file_inner_path, Debug.formatException(err))) - num_error += 1 - - if num_imported and num_imported % 100 == 0: - self.site.messageWebsocket( - _["Database rebuilding...
    Imported {0} of {1} files (error: {2})..."].format( - num_imported, num_total, num_error - ), - "rebuild", int(float(num_imported) / num_total * 100) - ) - time.sleep(0.001) # Context switch to avoid UI block - - finally: - cur.close() - if num_total > 100: - self.site.messageWebsocket( - _["Database rebuilding...
    Imported {0} of {1} files (error: {2})..."].format( - num_imported, num_total, num_error - ), "rebuild", 100 - ) - self.log.info("Rebuild: Imported %s data file in %.3fs" % (num_imported, time.time() - s)) - self.event_db_busy.set(True) # Event done, notify waiters - self.event_db_busy = None # Clear event - self.db.commit("Rebuilt") - - return True + for content_inner_path, content in self.site.content_manager.contents.items(): + content_path = self.getPath(content_inner_path) + if os.path.isfile(content_path): # Missing content.json file + if self.db.loadJson(content_path, cur=cur): + found += 1 + else: + self.log.error("[MISSING] %s" % content_inner_path) + for file_relative_path in content["files"].keys(): + if not file_relative_path.endswith(".json"): + continue # We only interesed in json files + content_inner_path_dir = helper.getDirname(content_inner_path) # Content.json dir relative to site + file_inner_path = content_inner_path_dir + file_relative_path # File Relative to site dir + file_inner_path = file_inner_path.strip("/") # Strip leading / + file_path = self.getPath(file_inner_path) + if os.path.isfile(file_path): + if self.db.loadJson(file_path, cur=cur): + found += 1 + else: + self.log.error("[MISSING] %s" % file_inner_path) + cur.execute("END") + self.log.info("Imported %s data file in %ss" % (found, time.time() - s)) + self.event_db_busy.set(True) # Event done, notify waiters + self.event_db_busy = None # Clear event # Execute sql query or rebuild on dberror def query(self, query, params=None): - if not query.strip().upper().startswith("SELECT"): - raise Exception("Only SELECT query supported") - + if self.event_db_busy: # Db not ready for queries + self.log.debug("Wating for db...") + self.event_db_busy.get() # Wait for event try: res = self.getDb().execute(query, params) - except sqlite3.DatabaseError as err: + except sqlite3.DatabaseError, err: if err.__class__.__name__ == "DatabaseError": self.log.error("Database error: %s, query: %s, try 
to rebuilding it..." % (err, query)) - try: - self.rebuildDb(reason="Query error") - except sqlite3.OperationalError: - pass + self.rebuildDb() res = self.db.cur.execute(query, params) else: raise err return res - def ensureDir(self, inner_path): - try: - os.makedirs(self.getPath(inner_path)) - except OSError as err: - if err.errno == errno.EEXIST: - return False - else: - raise err - return True + # Open file object + def open(self, inner_path, mode="rb"): + return open(self.getPath(inner_path), mode) # Open file object - def open(self, inner_path, mode="rb", create_dirs=False, **kwargs): - file_path = self.getPath(inner_path) - if create_dirs: - file_inner_dir = os.path.dirname(inner_path) - self.ensureDir(file_inner_dir) - return open(file_path, mode, **kwargs) - - # Open file object - @thread_pool_fs_read.wrap - def read(self, inner_path, mode="rb"): - return self.open(inner_path, mode).read() - - @thread_pool_fs_write.wrap - def writeThread(self, inner_path, content): - file_path = self.getPath(inner_path) - # Create dir if not exist - self.ensureDir(os.path.dirname(inner_path)) - # Write file - if hasattr(content, 'read'): # File-like object - - with open(file_path, "wb") as file: - shutil.copyfileobj(content, file) # Write buff to disk - else: # Simple string - if inner_path == "content.json" and os.path.isfile(file_path): - helper.atomicWrite(file_path, content) - else: - with open(file_path, "wb") as file: - file.write(content) + def read(self, inner_path, mode="r"): + return open(self.getPath(inner_path), mode).read() # Write content to file def write(self, inner_path, content): - self.writeThread(inner_path, content) + file_path = self.getPath(inner_path) + # Create dir if not exist + file_dir = os.path.dirname(file_path) + if not os.path.isdir(file_dir): + os.makedirs(file_dir) + # Write file + if hasattr(content, 'read'): # File-like object + with open(file_path, "wb") as file: + shutil.copyfileobj(content, file) # Write buff to disk + else: # Simple 
string + with open(file_path, "wb") as file: + file.write(content) + del content self.onUpdated(inner_path) # Remove file from filesystem def delete(self, inner_path): file_path = self.getPath(inner_path) os.unlink(file_path) - self.onUpdated(inner_path, file=False) - - def deleteDir(self, inner_path): - dir_path = self.getPath(inner_path) - os.rmdir(dir_path) - - def rename(self, inner_path_before, inner_path_after): - for retry in range(3): - rename_err = None - # To workaround "The process cannot access the file beacause it is being used by another process." error - try: - os.rename(self.getPath(inner_path_before), self.getPath(inner_path_after)) - break - except Exception as err: - rename_err = err - self.log.error("%s rename error: %s (retry #%s)" % (inner_path_before, err, retry)) - time.sleep(0.1 + retry) - if rename_err: - raise rename_err # List files from a directory - @thread_pool_fs_read.wrap - def walk(self, dir_inner_path, ignore=None): + def list(self, dir_inner_path): directory = self.getPath(dir_inner_path) for root, dirs, files in os.walk(directory): root = root.replace("\\", "/") root_relative_path = re.sub("^%s" % re.escape(directory), "", root).lstrip("/") for file_name in files: if root_relative_path: # Not root dir - file_relative_path = root_relative_path + "/" + file_name + yield root_relative_path + "/" + file_name else: - file_relative_path = file_name - - if ignore and SafeRe.match(ignore, file_relative_path): - continue - - yield file_relative_path - - # Don't scan directory that is in the ignore pattern - if ignore: - dirs_filtered = [] - for dir_name in dirs: - if root_relative_path: - dir_relative_path = root_relative_path + "/" + dir_name - else: - dir_relative_path = dir_name - - if ignore == ".*" or re.match(".*([|(]|^)%s([|)]|$)" % re.escape(dir_relative_path + "/.*"), ignore): - continue - - dirs_filtered.append(dir_name) - dirs[:] = dirs_filtered - - # list directories in a directory - @thread_pool_fs_read.wrap - def list(self, 
dir_inner_path): - directory = self.getPath(dir_inner_path) - return os.listdir(directory) + yield file_name # Site content updated - def onUpdated(self, inner_path, file=None): + def onUpdated(self, inner_path): + file_path = self.getPath(inner_path) # Update Sql cache - should_load_to_db = inner_path.endswith(".json") or inner_path.endswith(".json.gz") if inner_path == "dbschema.json": self.has_db = self.isFile("dbschema.json") # Reopen DB to check changes - if self.has_db: - self.closeDb("New dbschema") - gevent.spawn(self.getDb) - elif not config.disable_db and should_load_to_db and self.has_db: # Load json file to db - if config.verbose: - self.log.debug("Loading json file to db: %s (file: %s)" % (inner_path, file)) + self.closeDb() + self.openDb() + elif inner_path.endswith(".json") and self.has_db: # Load json file to db + self.log.debug("Loading json file to db: %s" % inner_path) try: - self.updateDbFile(inner_path, file) - except Exception as err: + self.getDb().loadJson(file_path) + except Exception, err: self.log.error("Json %s load error: %s" % (inner_path, Debug.formatException(err))) - self.closeDb("Json load error") + self.closeDb() # Load and parse json file - @thread_pool_fs_read.wrap def loadJson(self, inner_path): - try: - with self.open(inner_path, "r", encoding="utf8") as file: - return json.load(file) - except Exception as err: - self.log.warning("Json load error: %s" % Debug.formatException(err)) - return None + with self.open(inner_path) as file: + return json.load(file) # Write formatted json file def writeJson(self, inner_path, data): + content = json.dumps(data, indent=1, sort_keys=True) + # Make it a little more compact by removing unnecessary white space + + def compact_list(match): + return "[ " + match.group(1).strip() + " ]" + + def compact_dict(match): + return "{ " + match.group(1).strip() + " }" + + content = re.sub("\[([^,\{\[]{10,100}?)\]", compact_list, content, flags=re.DOTALL) + content = re.sub("\{([^,\[\{]{10,100}?)\}", 
compact_dict, content, flags=re.DOTALL) # Write to disk - self.write(inner_path, helper.jsonDumps(data).encode("utf8")) + self.write(inner_path, content) # Get file size def getSize(self, inner_path): path = self.getPath(inner_path) - try: + if os.path.isfile(path): return os.path.getsize(path) - except Exception: + else: return 0 # File exist @@ -404,233 +232,148 @@ class SiteStorage(object): # Security check and return path of site's file def getPath(self, inner_path): inner_path = inner_path.replace("\\", "/") # Windows separator fix + inner_path = re.sub("^%s/" % re.escape(self.directory), "", inner_path) # Remove site directory if begins with it + file_path = u"%s/%s" % (self.directory, inner_path) if not inner_path: return self.directory - if "../" in inner_path: - raise Exception("File not allowed: %s" % inner_path) - - return "%s/%s" % (self.directory, inner_path) + file_abspath = os.path.dirname(os.path.abspath(file_path)) + if ".." in file_path or not file_abspath.startswith(self.allowed_dir): + raise Exception(u"File not allowed: %s" % file_path) + return file_path # Get site dir relative path def getInnerPath(self, path): if path == self.directory: inner_path = "" else: - if path.startswith(self.directory): - inner_path = path[len(self.directory) + 1:] - else: - raise Exception("File not allowed: %s" % path) + inner_path = re.sub("^%s/" % re.escape(self.directory), "", path) return inner_path # Verify all files sha512sum using content.json def verifyFiles(self, quick_check=False, add_optional=False, add_changed=True): bad_files = [] - back = defaultdict(int) - back["bad_files"] = bad_files - i = 0 - self.log.debug("Verifing files...") if not self.site.content_manager.contents.get("content.json"): # No content.json, download it first - self.log.debug("VerifyFile content.json not exists") self.site.needFile("content.json", update=True) # Force update to fix corrupt file self.site.content_manager.loadContent() # Reload content.json - for 
content_inner_path, content in list(self.site.content_manager.contents.items()): - back["num_content"] += 1 - i += 1 - if i % 50 == 0: - time.sleep(0.001) # Context switch to avoid gevent hangs + for content_inner_path, content in self.site.content_manager.contents.items(): if not os.path.isfile(self.getPath(content_inner_path)): # Missing content.json file - back["num_content_missing"] += 1 self.log.debug("[MISSING] %s" % content_inner_path) bad_files.append(content_inner_path) - for file_relative_path in list(content.get("files", {}).keys()): - back["num_file"] += 1 + for file_relative_path in content.get("files", {}).keys(): file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir file_inner_path = file_inner_path.strip("/") # Strip leading / file_path = self.getPath(file_inner_path) if not os.path.isfile(file_path): - back["num_file_missing"] += 1 self.log.debug("[MISSING] %s" % file_inner_path) bad_files.append(file_inner_path) continue if quick_check: ok = os.path.getsize(file_path) == content["files"][file_relative_path]["size"] - if not ok: - err = "Invalid size" else: - try: - ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb")) - except Exception as _err: - err = _err - ok = False + ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb")) if not ok: - back["num_file_invalid"] += 1 - self.log.debug("[INVALID] %s: %s" % (file_inner_path, err)) - if add_changed or content.get("cert_user_id"): # If updating own site only add changed user files + self.log.debug("[CHANGED] %s" % file_inner_path) + if add_changed: bad_files.append(file_inner_path) # Optional files optional_added = 0 optional_removed = 0 - for file_relative_path in list(content.get("files_optional", {}).keys()): - back["num_optional"] += 1 - file_node = content["files_optional"][file_relative_path] + for file_relative_path in content.get("files_optional", {}).keys(): file_inner_path = 
helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir file_inner_path = file_inner_path.strip("/") # Strip leading / file_path = self.getPath(file_inner_path) - hash_id = self.site.content_manager.hashfield.getHashId(file_node["sha512"]) if not os.path.isfile(file_path): - if self.site.content_manager.isDownloaded(file_inner_path, hash_id): - back["num_optional_removed"] += 1 - self.log.debug("[OPTIONAL MISSING] %s" % file_inner_path) - self.site.content_manager.optionalRemoved(file_inner_path, hash_id, file_node["size"]) - if add_optional and self.site.isDownloadable(file_inner_path): - self.log.debug("[OPTIONAL ADDING] %s" % file_inner_path) + self.site.content_manager.hashfield.removeHash(content["files_optional"][file_relative_path]["sha512"]) + if add_optional: bad_files.append(file_inner_path) continue if quick_check: ok = os.path.getsize(file_path) == content["files_optional"][file_relative_path]["size"] else: - try: - ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb")) - except Exception as err: - ok = False + ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb")) if ok: - if not self.site.content_manager.isDownloaded(file_inner_path, hash_id): - back["num_optional_added"] += 1 - self.site.content_manager.optionalDownloaded(file_inner_path, hash_id, file_node["size"]) - optional_added += 1 - self.log.debug("[OPTIONAL FOUND] %s" % file_inner_path) + self.site.content_manager.hashfield.appendHash(content["files_optional"][file_relative_path]["sha512"]) + optional_added += 1 else: - if self.site.content_manager.isDownloaded(file_inner_path, hash_id): - back["num_optional_removed"] += 1 - self.site.content_manager.optionalRemoved(file_inner_path, hash_id, file_node["size"]) - optional_removed += 1 - bad_files.append(file_inner_path) + self.site.content_manager.hashfield.removeHash(content["files_optional"][file_relative_path]["sha512"]) + optional_removed += 1 + if 
add_optional: + bad_files.append(file_inner_path) self.log.debug("[OPTIONAL CHANGED] %s" % file_inner_path) - if config.verbose: - self.log.debug( - "%s verified: %s, quick: %s, optionals: +%s -%s" % - (content_inner_path, len(content["files"]), quick_check, optional_added, optional_removed) - ) + self.log.debug( + "%s verified: %s, quick: %s, bad: %s, optionals: +%s -%s" % + (content_inner_path, len(content["files"]), quick_check, bad_files, optional_added, optional_removed) + ) - self.site.content_manager.contents.db.processDelayed() - time.sleep(0.001) # Context switch to avoid gevent hangs - return back + return bad_files # Check and try to fix site files integrity - def updateBadFiles(self, quick_check=True): + def checkFiles(self, quick_check=True): s = time.time() - res = self.verifyFiles( + bad_files = self.verifyFiles( quick_check, - add_optional=True, + add_optional=self.site.settings.get("autodownloadoptional"), add_changed=not self.site.settings.get("own") # Don't overwrite changed files if site owned ) - bad_files = res["bad_files"] self.site.bad_files = {} if bad_files: for bad_file in bad_files: self.site.bad_files[bad_file] = 1 - self.log.debug("Checked files in %.2fs... Found bad files: %s, Quick:%s" % (time.time() - s, len(bad_files), quick_check)) + self.log.debug("Checked files in %.2fs... Quick:%s" % (time.time() - s, quick_check)) # Delete site's all file - @thread_pool_fs_batch.wrap def deleteFiles(self): - site_title = self.site.content_manager.contents.get("content.json", {}).get("title", self.site.address) - message_id = "delete-%s" % self.site.address - self.log.debug("Deleting files from content.json (title: %s)..." 
% site_title) - - files = [] # Get filenames - content_inner_paths = list(self.site.content_manager.contents.keys()) - for i, content_inner_path in enumerate(content_inner_paths): - content = self.site.content_manager.contents.get(content_inner_path, {}) - files.append(content_inner_path) - # Add normal files - for file_relative_path in list(content.get("files", {}).keys()): - file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir - files.append(file_inner_path) - # Add optional files - for file_relative_path in list(content.get("files_optional", {}).keys()): - file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir - files.append(file_inner_path) - - if i % 100 == 0: - num_files = len(files) - self.site.messageWebsocket( - _("Deleting site {site_title}...
    Collected {num_files} files"), - message_id, (i / len(content_inner_paths)) * 25 - ) - - if self.isFile("dbschema.json"): + if self.has_db: self.log.debug("Deleting db file...") - self.closeDb("Deleting site") - self.has_db = False + self.closeDb() try: schema = self.loadJson("dbschema.json") db_path = self.getPath(schema["db_file"]) if os.path.isfile(db_path): os.unlink(db_path) - except Exception as err: + except Exception, err: self.log.error("Db file delete error: %s" % err) - num_files = len(files) - for i, inner_path in enumerate(files): + self.log.debug("Deleting files from content.json...") + files = [] # Get filenames + for content_inner_path, content in self.site.content_manager.contents.items(): + files.append(content_inner_path) + # Add normal files + for file_relative_path in content.get("files", {}).keys(): + file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir + files.append(file_inner_path) + # Add optional files + for file_relative_path in content.get("files_optional", {}).keys(): + file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir + files.append(file_inner_path) + + for inner_path in files: path = self.getPath(inner_path) if os.path.isfile(path): - for retry in range(5): - try: - os.unlink(path) - break - except Exception as err: - self.log.error("Error removing %s: %s, try #%s" % (inner_path, err, retry)) - time.sleep(float(retry) / 10) - if i % 100 == 0: - self.site.messageWebsocket( - _("Deleting site {site_title}...
    Deleting file {i}/{num_files}"), - message_id, 25 + (i / num_files) * 50 - ) - self.onUpdated(inner_path, False) + os.unlink(path) self.log.debug("Deleting empty dirs...") - i = 0 for root, dirs, files in os.walk(self.directory, topdown=False): for dir in dirs: path = os.path.join(root, dir) - if os.path.isdir(path): - try: - i += 1 - if i % 100 == 0: - self.site.messageWebsocket( - _("Deleting site {site_title}...
    Deleting empty directories {i}"), - message_id, 85 - ) - os.rmdir(path) - except OSError: # Not empty - pass - + if os.path.isdir(path) and os.listdir(path) == []: + os.removedirs(path) + self.log.debug("Removing %s" % path) if os.path.isdir(self.directory) and os.listdir(self.directory) == []: - os.rmdir(self.directory) # Remove sites directory if empty + os.removedirs(self.directory) # Remove sites directory if empty if os.path.isdir(self.directory): self.log.debug("Some unknown file remained in site data dir: %s..." % self.directory) - self.site.messageWebsocket( - _("Deleting site {site_title}...
    Site deleted, but some unknown files left in the directory"), - message_id, 100 - ) return False # Some files not deleted else: - self.log.debug("Site %s data directory deleted: %s..." % (site_title, self.directory)) - - self.site.messageWebsocket( - _("Deleting site {site_title}...
    All files deleted successfully"), - message_id, 100 - ) - + self.log.debug("Site data directory deleted: %s..." % self.directory) return True # All clean diff --git a/src/Site/__init__.py b/src/Site/__init__.py index e69de29b..cc830ae8 100644 --- a/src/Site/__init__.py +++ b/src/Site/__init__.py @@ -0,0 +1,2 @@ +from Site import Site +from SiteStorage import SiteStorage \ No newline at end of file diff --git a/src/Test/BenchmarkSsl.py b/src/Test/BenchmarkSsl.py index 06181b89..fec19474 100644 --- a/src/Test/BenchmarkSsl.py +++ b/src/Test/BenchmarkSsl.py @@ -6,17 +6,16 @@ import time import sys import socket import ssl -sys.path.append(os.path.abspath("..")) # Imports relative to src dir +sys.path.append(os.path.abspath("src")) # Imports relative to src dir -import io as StringIO +import cStringIO as StringIO import gevent from gevent.server import StreamServer from gevent.pool import Pool -from Config import config -config.parse() from util import SslPatch + # Server socks = [] data = os.urandom(1024 * 100) @@ -41,13 +40,13 @@ def handle(sock_raw, addr): elif line == "gotssl\n": sock.sendall("yes\n") sock = gevent.ssl.wrap_socket( - sock_raw, server_side=True, keyfile='../../data/key-rsa.pem', certfile='../../data/cert-rsa.pem', + sock_raw, server_side=True, keyfile='data/key-rsa.pem', certfile='data/cert-rsa.pem', ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1 ) else: sock.sendall(data) - except Exception as err: - print(err) + except Exception, err: + print err try: sock.shutdown(gevent.socket.SHUT_WR) sock.close() @@ -87,7 +86,7 @@ def getData(): sock.do_handshake() clipher = sock.cipher() - for req in range(20): + for req in range(100): sock.sendall("req\n") buff = StringIO.StringIO() data = sock.recv(16 * 1024) @@ -102,7 +101,7 @@ def getData(): total_num += 1 total_bytes += buff.tell() if not data: - print("No data") + print "No data" sock.shutdown(gevent.socket.SHUT_WR) sock.close() @@ -119,20 +118,20 @@ def info(): else: memory_info = 
process.get_memory_info while 1: - print(total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s, end=' ') - print("using", clipher, "Mem:", memory_info()[0] / float(2 ** 20)) + print total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s, + print "using", clipher, "Mem:", memory_info()[0] / float(2 ** 20) time.sleep(1) gevent.spawn(info) -for test in range(1): +for test in range(10): clients = [] - for i in range(500): # Thread + for i in range(10): # Thread clients.append(gevent.spawn(getData)) gevent.joinall(clients) -print(total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s) +print total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s # Separate client/server process: # 10*10*100: diff --git a/src/Test/Spy.py b/src/Test/Spy.py index 44422550..c017dea9 100644 --- a/src/Test/Spy.py +++ b/src/Test/Spy.py @@ -1,23 +1,17 @@ -import logging - class Spy: - def __init__(self, obj, func_name): - self.obj = obj - self.__name__ = func_name - self.func_original = getattr(self.obj, func_name) - self.calls = [] + def __init__(self, obj, func_name): + self.obj = obj + self.func_name = func_name + self.func_original = getattr(self.obj, func_name) + self.calls = [] - def __enter__(self, *args, **kwargs): - logging.debug("Spy started") - def loggedFunc(cls, *args, **kwargs): - call = dict(enumerate(args, 1)) - call[0] = cls - call.update(kwargs) - logging.debug("Spy call: %s" % call) - self.calls.append(call) - return self.func_original(cls, *args, **kwargs) - setattr(self.obj, self.__name__, loggedFunc) - return self.calls + def __enter__(self, *args, **kwargs): + def loggedFunc(cls, *args, **kwags): + print "Logging", self, args, kwargs + self.calls.append(args) + return self.func_original(cls, *args, **kwargs) + setattr(self.obj, self.func_name, loggedFunc) + return self.calls - def __exit__(self, *args, **kwargs): - setattr(self.obj, self.__name__, 
self.func_original) \ No newline at end of file + def __exit__(self, *args, **kwargs): + setattr(self.obj, self.func_name, self.func_original) \ No newline at end of file diff --git a/src/Test/TestCached.py b/src/Test/TestCached.py deleted file mode 100644 index 088962c0..00000000 --- a/src/Test/TestCached.py +++ /dev/null @@ -1,59 +0,0 @@ -import time - -from util import Cached - - -class CachedObject: - def __init__(self): - self.num_called_add = 0 - self.num_called_multiply = 0 - self.num_called_none = 0 - - @Cached(timeout=1) - def calcAdd(self, a, b): - self.num_called_add += 1 - return a + b - - @Cached(timeout=1) - def calcMultiply(self, a, b): - self.num_called_multiply += 1 - return a * b - - @Cached(timeout=1) - def none(self): - self.num_called_none += 1 - return None - - -class TestCached: - def testNoneValue(self): - cached_object = CachedObject() - assert cached_object.none() is None - assert cached_object.none() is None - assert cached_object.num_called_none == 1 - time.sleep(2) - assert cached_object.none() is None - assert cached_object.num_called_none == 2 - - def testCall(self): - cached_object = CachedObject() - - assert cached_object.calcAdd(1, 2) == 3 - assert cached_object.calcAdd(1, 2) == 3 - assert cached_object.calcMultiply(1, 2) == 2 - assert cached_object.calcMultiply(1, 2) == 2 - assert cached_object.num_called_add == 1 - assert cached_object.num_called_multiply == 1 - - assert cached_object.calcAdd(2, 3) == 5 - assert cached_object.calcAdd(2, 3) == 5 - assert cached_object.num_called_add == 2 - - assert cached_object.calcAdd(1, 2) == 3 - assert cached_object.calcMultiply(2, 3) == 6 - assert cached_object.num_called_add == 2 - assert cached_object.num_called_multiply == 2 - - time.sleep(2) - assert cached_object.calcAdd(1, 2) == 3 - assert cached_object.num_called_add == 3 diff --git a/src/Test/TestConnectionServer.py b/src/Test/TestConnectionServer.py index 82ee605c..94175ffb 100644 --- a/src/Test/TestConnectionServer.py +++ 
b/src/Test/TestConnectionServer.py @@ -1,66 +1,43 @@ import time -import socket import gevent import pytest -import mock from Crypt import CryptConnection from Connection import ConnectionServer -from Config import config @pytest.mark.usefixtures("resetSettings") class TestConnection: - def testIpv6(self, file_server6): - assert ":" in file_server6.ip + def testSslConnection(self, file_server): + file_server.ip_incoming = {} # Reset flood protection + client = ConnectionServer("127.0.0.1", 1545) + assert file_server != client - client = ConnectionServer(file_server6.ip, 1545) - connection = client.getConnection(file_server6.ip, 1544) - - assert connection.ping() + # Connect to myself + connection = client.getConnection("127.0.0.1", 1544) + assert len(file_server.connections) == 1 + assert len(file_server.ips) == 1 + assert connection.handshake + assert connection.crypt # Close connection connection.close() client.stop() time.sleep(0.01) - assert len(file_server6.connections) == 0 - - # Should not able to reach on ipv4 ip - with pytest.raises(socket.error) as err: - client = ConnectionServer("127.0.0.1", 1545) - connection = client.getConnection("127.0.0.1", 1544) - - def testSslConnection(self, file_server): - client = ConnectionServer(file_server.ip, 1545) - assert file_server != client - - # Connect to myself - with mock.patch('Config.config.ip_local', return_value=[]): # SSL not used for local ips - connection = client.getConnection(file_server.ip, 1544) - - assert len(file_server.connections) == 1 - assert connection.handshake - assert connection.crypt - - - # Close connection - connection.close("Test ended") - client.stop() - time.sleep(0.1) assert len(file_server.connections) == 0 - assert file_server.num_incoming == 2 # One for file_server fixture, one for the test + assert len(file_server.ips) == 0 def testRawConnection(self, file_server): - client = ConnectionServer(file_server.ip, 1545) + file_server.ip_incoming = {} # Reset flood protection + client = 
ConnectionServer("127.0.0.1", 1545) assert file_server != client # Remove all supported crypto crypt_supported_bk = CryptConnection.manager.crypt_supported CryptConnection.manager.crypt_supported = [] - with mock.patch('Config.config.ip_local', return_value=[]): # SSL not used for local ips - connection = client.getConnection(file_server.ip, 1544) + connection = client.getConnection("127.0.0.1", 1544) assert len(file_server.connections) == 1 assert not connection.crypt @@ -74,8 +51,9 @@ class TestConnection: CryptConnection.manager.crypt_supported = crypt_supported_bk def testPing(self, file_server, site): - client = ConnectionServer(file_server.ip, 1545) - connection = client.getConnection(file_server.ip, 1544) + file_server.ip_incoming = {} # Reset flood protection + client = ConnectionServer("127.0.0.1", 1545) + connection = client.getConnection("127.0.0.1", 1544) assert connection.ping() @@ -83,36 +61,40 @@ class TestConnection: client.stop() def testGetConnection(self, file_server): - client = ConnectionServer(file_server.ip, 1545) - connection = client.getConnection(file_server.ip, 1544) + file_server.ip_incoming = {} # Reset flood protection + client = ConnectionServer("127.0.0.1", 1545) + connection = client.getConnection("127.0.0.1", 1544) # Get connection by ip/port - connection2 = client.getConnection(file_server.ip, 1544) + connection2 = client.getConnection("127.0.0.1", 1544) assert connection == connection2 # Get connection by peerid - assert not client.getConnection(file_server.ip, 1544, peer_id="notexists", create=False) - connection2 = client.getConnection(file_server.ip, 1544, peer_id=connection.handshake["peer_id"], create=False) + assert not client.getConnection("127.0.0.1", 1544, peer_id="notexists", create=False) + connection2 = client.getConnection("127.0.0.1", 1544, peer_id=connection.handshake["peer_id"], create=False) assert connection2 == connection connection.close() client.stop() def testFloodProtection(self, file_server): - whitelist = 
file_server.whitelist # Save for reset - file_server.whitelist = [] # Disable 127.0.0.1 whitelist - client = ConnectionServer(file_server.ip, 1545) + file_server.ip_incoming = {} # Reset flood protection + client = ConnectionServer("127.0.0.1", 1545) - # Only allow 6 connection in 1 minute - for reconnect in range(6): - connection = client.getConnection(file_server.ip, 1544) - assert connection.handshake - connection.close() + # Only allow 3 connection in 1 minute + connection = client.getConnection("127.0.0.1", 1544) + assert connection.handshake + connection.close() - # The 7. one will timeout + connection = client.getConnection("127.0.0.1", 1544) + assert connection.handshake + connection.close() + + connection = client.getConnection("127.0.0.1", 1544) + assert connection.handshake + connection.close() + + # The 4. one will timeout with pytest.raises(gevent.Timeout): with gevent.Timeout(0.1): - connection = client.getConnection(file_server.ip, 1544) - - # Reset whitelist - file_server.whitelist = whitelist + connection = client.getConnection("127.0.0.1", 1544) diff --git a/src/Test/TestContent.py b/src/Test/TestContent.py index 7e7ca1a5..17ae9a55 100644 --- a/src/Test/TestContent.py +++ b/src/Test/TestContent.py @@ -1,18 +1,14 @@ import json import time -import io +from cStringIO import StringIO import pytest from Crypt import CryptBitcoin -from Content.ContentManager import VerifyError, SignError -from util.SafeRe import UnsafePatternError @pytest.mark.usefixtures("resetSettings") class TestContent: - privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" - def testInclude(self, site): # Rules defined in parent content.json rules = site.content_manager.getRules("data/test_include/content.json") @@ -38,9 +34,9 @@ class TestContent: # Valid signers for root content.json assert site.content_manager.getValidSigners("content.json") == ["1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"] - def testInlcudeLimits(self, site, crypt_bitcoin_lib): + def 
testInlcudeLimits(self, site): + privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" # Data validation - res = [] data_dict = { "files": { "data.json": { @@ -52,52 +48,42 @@ class TestContent: } # Normal data - data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)} - data_json = json.dumps(data_dict).encode() - data = io.BytesIO(data_json) + data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), privatekey)} + data = StringIO(json.dumps(data_dict)) assert site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) - # Reset del data_dict["signs"] # Too large data_dict["files"]["data.json"]["size"] = 200000 # Emulate 2MB sized data.json - data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)} - data = io.BytesIO(json.dumps(data_dict).encode()) - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) - assert "Include too large" in str(err.value) - + data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), privatekey)} + data = StringIO(json.dumps(data_dict)) + assert not site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) # Reset data_dict["files"]["data.json"]["size"] = 505 del data_dict["signs"] # Not allowed file data_dict["files"]["notallowed.exe"] = data_dict["files"]["data.json"] - data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)} - data = io.BytesIO(json.dumps(data_dict).encode()) - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) - assert "File not allowed" in str(err.value) - + 
data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), privatekey)} + data = StringIO(json.dumps(data_dict)) + assert not site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) # Reset del data_dict["files"]["notallowed.exe"] del data_dict["signs"] # Should work again - data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)} - data = io.BytesIO(json.dumps(data_dict).encode()) + data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), privatekey)} + data = StringIO(json.dumps(data_dict)) assert site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) @pytest.mark.parametrize("inner_path", ["content.json", "data/test_include/content.json", "data/users/content.json"]) def testSign(self, site, inner_path): # Bad privatekey - with pytest.raises(SignError) as err: - site.content_manager.sign(inner_path, privatekey="5aaa3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMnaa", filewrite=False) - assert "Private key invalid" in str(err.value) + assert not site.content_manager.sign(inner_path, privatekey="5aaa3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMnaa", filewrite=False) # Good privatekey - content = site.content_manager.sign(inner_path, privatekey=self.privatekey, filewrite=False) + content = site.content_manager.sign(inner_path, privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv", filewrite=False) content_old = site.content_manager.contents[inner_path] # Content before the sign assert not content_old == content # Timestamp changed assert site.address in content["signs"] # Used the site's private key to sign @@ -116,16 +102,14 @@ class TestContent: ) def testSignOptionalFiles(self, site): - for hash in list(site.content_manager.hashfield): - site.content_manager.hashfield.remove(hash) - assert len(site.content_manager.hashfield) == 
0 site.content_manager.contents["content.json"]["optional"] = "((data/img/zero.*))" - content_optional = site.content_manager.sign(privatekey=self.privatekey, filewrite=False, remove_missing_optional=True) + content_optional = site.content_manager.sign(privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv", filewrite=False) + del site.content_manager.contents["content.json"]["optional"] - content_nooptional = site.content_manager.sign(privatekey=self.privatekey, filewrite=False, remove_missing_optional=True) + content_nooptional = site.content_manager.sign(privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv", filewrite=False) assert len(content_nooptional.get("files_optional", {})) == 0 # No optional files if no pattern assert len(content_optional["files_optional"]) > 0 @@ -134,9 +118,6 @@ class TestContent: def testFileInfo(self, site): assert "sha512" in site.content_manager.getFileInfo("index.html") - assert site.content_manager.getFileInfo("data/img/domain.png")["content_inner_path"] == "content.json" - assert site.content_manager.getFileInfo("data/users/hello.png")["content_inner_path"] == "data/users/content.json" - assert site.content_manager.getFileInfo("data/users/content.json")["content_inner_path"] == "data/users/content.json" assert not site.content_manager.getFileInfo("notexist") # Optional file @@ -151,123 +132,3 @@ class TestContent: file_info_optional = site.content_manager.getFileInfo("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif") assert "sha512" in file_info_optional assert file_info_optional["optional"] is True - - def testVerify(self, site, crypt_bitcoin_lib): - inner_path = "data/test_include/content.json" - data_dict = site.storage.loadJson(inner_path) - data = io.BytesIO(json.dumps(data_dict).encode("utf8")) - - # Re-sign - data_dict["signs"] = { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey) - } - assert 
site.content_manager.verifyFile(inner_path, data, ignore_same=False) - - # Wrong address - data_dict["address"] = "Othersite" - del data_dict["signs"] - data_dict["signs"] = { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey) - } - data = io.BytesIO(json.dumps(data_dict).encode()) - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile(inner_path, data, ignore_same=False) - assert "Wrong site address" in str(err.value) - - # Wrong inner_path - data_dict["address"] = "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" - data_dict["inner_path"] = "content.json" - del data_dict["signs"] - data_dict["signs"] = { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey) - } - data = io.BytesIO(json.dumps(data_dict).encode()) - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile(inner_path, data, ignore_same=False) - assert "Wrong inner_path" in str(err.value) - - # Everything right again - data_dict["address"] = "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" - data_dict["inner_path"] = inner_path - del data_dict["signs"] - data_dict["signs"] = { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey) - } - data = io.BytesIO(json.dumps(data_dict).encode()) - assert site.content_manager.verifyFile(inner_path, data, ignore_same=False) - - def testVerifyInnerPath(self, site, crypt_bitcoin_lib): - inner_path = "content.json" - data_dict = site.storage.loadJson(inner_path) - - for good_relative_path in ["data.json", "out/data.json", "Any File [by none] (1).jpg", "ÃĄrvzítÅąrő/tÃŧkÃļrfÃērÃŗgÊp.txt"]: - data_dict["files"] = {good_relative_path: {"sha512": "369d4e780cc80504285f13774ca327fe725eed2d813aad229e62356b07365906", "size": 505}} - - if "sign" in data_dict: - del data_dict["sign"] - del data_dict["signs"] - data_dict["signs"] = { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": 
CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey) - } - data = io.BytesIO(json.dumps(data_dict).encode()) - assert site.content_manager.verifyFile(inner_path, data, ignore_same=False) - - for bad_relative_path in ["../data.json", "data/" * 100, "invalid|file.jpg", "con.txt", "any/con.txt"]: - data_dict["files"] = {bad_relative_path: {"sha512": "369d4e780cc80504285f13774ca327fe725eed2d813aad229e62356b07365906", "size": 505}} - - if "sign" in data_dict: - del data_dict["sign"] - del data_dict["signs"] - data_dict["signs"] = { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey) - } - data = io.BytesIO(json.dumps(data_dict).encode()) - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile(inner_path, data, ignore_same=False) - assert "Invalid relative path" in str(err.value) - - @pytest.mark.parametrize("key", ["ignore", "optional"]) - def testSignUnsafePattern(self, site, key): - site.content_manager.contents["content.json"][key] = "([a-zA-Z]+)*" - with pytest.raises(UnsafePatternError) as err: - site.content_manager.sign("content.json", privatekey=self.privatekey, filewrite=False) - assert "Potentially unsafe" in str(err.value) - - - def testVerifyUnsafePattern(self, site, crypt_bitcoin_lib): - site.content_manager.contents["content.json"]["includes"]["data/test_include/content.json"]["files_allowed"] = "([a-zA-Z]+)*" - with pytest.raises(UnsafePatternError) as err: - with site.storage.open("data/test_include/content.json") as data: - site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) - assert "Potentially unsafe" in str(err.value) - - site.content_manager.contents["data/users/content.json"]["user_contents"]["permission_rules"]["([a-zA-Z]+)*"] = {"max_size": 0} - with pytest.raises(UnsafePatternError) as err: - with site.storage.open("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json") as data: - 
site.content_manager.verifyFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", data, ignore_same=False) - assert "Potentially unsafe" in str(err.value) - - def testPathValidation(self, site): - assert site.content_manager.isValidRelativePath("test.txt") - assert site.content_manager.isValidRelativePath("test/!@#$%^&().txt") - assert site.content_manager.isValidRelativePath("ÜøßÂŒƂÆÇ.txt") - assert site.content_manager.isValidRelativePath("Ņ‚Đĩҁ҂.Ņ‚ĐĩĐēҁ҂") - assert site.content_manager.isValidRelativePath("𝐮𝐧đĸ𝐜𝐨𝐝𝐞𝑖𝑠𝒂𝒘𝒆𝒔𝒐𝒎𝒆") - - # Test rules based on https://stackoverflow.com/questions/1976007/what-characters-are-forbidden-in-windows-and-linux-directory-names - - assert not site.content_manager.isValidRelativePath("any\\hello.txt") # \ not allowed - assert not site.content_manager.isValidRelativePath("/hello.txt") # Cannot start with / - assert not site.content_manager.isValidRelativePath("\\hello.txt") # Cannot start with \ - assert not site.content_manager.isValidRelativePath("../hello.txt") # Not allowed .. 
in path - assert not site.content_manager.isValidRelativePath("\0hello.txt") # NULL character - assert not site.content_manager.isValidRelativePath("\31hello.txt") # 0-31 (ASCII control characters) - assert not site.content_manager.isValidRelativePath("any/hello.txt ") # Cannot end with space - assert not site.content_manager.isValidRelativePath("any/hello.txt.") # Cannot end with dot - assert site.content_manager.isValidRelativePath(".hello.txt") # Allow start with dot - assert not site.content_manager.isValidRelativePath("any/CON") # Protected names on Windows - assert not site.content_manager.isValidRelativePath("CON/any.txt") - assert not site.content_manager.isValidRelativePath("any/lpt1.txt") - assert site.content_manager.isValidRelativePath("any/CONAN") - assert not site.content_manager.isValidRelativePath("any/CONOUT$") - assert not site.content_manager.isValidRelativePath("a" * 256) # Max 255 characters allowed diff --git a/src/Test/TestContentUser.py b/src/Test/TestContentUser.py index 8e91dd3e..ca2184fb 100644 --- a/src/Test/TestContentUser.py +++ b/src/Test/TestContentUser.py @@ -1,39 +1,27 @@ import json -import io +from cStringIO import StringIO import pytest from Crypt import CryptBitcoin -from Content.ContentManager import VerifyError, SignError @pytest.mark.usefixtures("resetSettings") -class TestContentUser: +class TestUserContent: def testSigners(self, site): # File info for not existing user file file_info = site.content_manager.getFileInfo("data/users/notexist/data.json") assert file_info["content_inner_path"] == "data/users/notexist/content.json" - file_info = site.content_manager.getFileInfo("data/users/notexist/a/b/data.json") - assert file_info["content_inner_path"] == "data/users/notexist/content.json" valid_signers = site.content_manager.getValidSigners("data/users/notexist/content.json") - assert valid_signers == ["14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet", "notexist", "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"] + assert valid_signers == ["notexist", 
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"] # File info for exsitsing user file valid_signers = site.content_manager.getValidSigners("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json") assert '1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT' in valid_signers # The site address - assert '14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet' in valid_signers # Admin user defined in data/users/content.json + assert '14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet' in valid_signers # Admin user definied in data/users/content.json assert '1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C' in valid_signers # The user itself assert len(valid_signers) == 3 # No more valid signers - # Valid signer for banned user - user_content = site.storage.loadJson("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json") - user_content["cert_user_id"] = "bad@zeroid.bit" - - valid_signers = site.content_manager.getValidSigners("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content) - assert '1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT' in valid_signers # The site address - assert '14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet' in valid_signers # Admin user defined in data/users/content.json - assert '1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C' not in valid_signers # The user itself - def testRules(self, site): # We going to manipulate it this test rules based on data/users/content.json user_content = site.storage.loadJson("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json") @@ -43,84 +31,32 @@ class TestContentUser: user_content["cert_user_id"] = "nofish@zeroid.bit" rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content) assert rules["max_size"] == 100000 - assert "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C" in rules["signers"] # Unknown user user_content["cert_auth_type"] = "web" user_content["cert_user_id"] = "noone@zeroid.bit" rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content) assert rules["max_size"] == 10000 - assert 
"1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C" in rules["signers"] # User with more size limit based on auth type user_content["cert_auth_type"] = "bitmsg" user_content["cert_user_id"] = "noone@zeroid.bit" rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content) assert rules["max_size"] == 15000 - assert "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C" in rules["signers"] # Banned user user_content["cert_auth_type"] = "web" user_content["cert_user_id"] = "bad@zeroid.bit" rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content) - assert "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C" not in rules["signers"] - - def testRulesAddress(self, site): - user_inner_path = "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json" - user_content = site.storage.loadJson(user_inner_path) - - rules = site.content_manager.getRules(user_inner_path, user_content) - assert rules["max_size"] == 10000 - assert "1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9" in rules["signers"] - - users_content = site.content_manager.contents["data/users/content.json"] - - # Ban user based on address - users_content["user_contents"]["permissions"]["1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9"] = False - rules = site.content_manager.getRules(user_inner_path, user_content) - assert "1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9" not in rules["signers"] - - # Change max allowed size - users_content["user_contents"]["permissions"]["1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9"] = {"max_size": 20000} - rules = site.content_manager.getRules(user_inner_path, user_content) - assert rules["max_size"] == 20000 - - def testVerifyAddress(self, site): - privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" # For 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT - user_inner_path = "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json" - data_dict = site.storage.loadJson(user_inner_path) - users_content = site.content_manager.contents["data/users/content.json"] - - data 
= io.BytesIO(json.dumps(data_dict).encode()) - assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) - - # Test error on 15k data.json - data_dict["files"]["data.json"]["size"] = 1024 * 15 - del data_dict["signs"] # Remove signs before signing - data_dict["signs"] = { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) - } - data = io.BytesIO(json.dumps(data_dict).encode()) - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) - assert "Include too large" in str(err.value) - - # Give more space based on address - users_content["user_contents"]["permissions"]["1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9"] = {"max_size": 20000} - del data_dict["signs"] # Remove signs before signing - data_dict["signs"] = { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) - } - data = io.BytesIO(json.dumps(data_dict).encode()) - assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) + assert rules is False def testVerify(self, site): privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" # For 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT user_inner_path = "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json" - data_dict = site.storage.loadJson(user_inner_path) + data_dict = site.content_manager.contents[user_inner_path] users_content = site.content_manager.contents["data/users/content.json"] - data = io.BytesIO(json.dumps(data_dict).encode()) + data = StringIO(json.dumps(data_dict)) assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) # Test max size exception by setting allowed to 0 @@ -131,11 +67,8 @@ class TestContentUser: users_content["user_contents"]["permission_rules"][".*"]["max_size"] = 0 rules = site.content_manager.getRules(user_inner_path, data_dict) assert rules["max_size"] == 0 - data = 
io.BytesIO(json.dumps(data_dict).encode()) - - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) - assert "Include too large" in str(err.value) + data = StringIO(json.dumps(data_dict)) + assert not site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) users_content["user_contents"]["permission_rules"][".*"]["max_size"] = 10000 # Reset # Test max optional size exception @@ -145,7 +78,7 @@ class TestContentUser: data_dict["signs"] = { "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) } - data = io.BytesIO(json.dumps(data_dict).encode()) + data = StringIO(json.dumps(data_dict)) assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) # 100 MB gif = Not allowed @@ -154,10 +87,8 @@ class TestContentUser: data_dict["signs"] = { "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) } - data = io.BytesIO(json.dumps(data_dict).encode()) - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) - assert "Include optional files too large" in str(err.value) + data = StringIO(json.dumps(data_dict)) + assert not site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) data_dict["files_optional"]["peanut-butter-jelly-time.gif"]["size"] = 1024 * 1024 # Reset # hello.exe = Not allowed @@ -166,22 +97,18 @@ class TestContentUser: data_dict["signs"] = { "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) } - data = io.BytesIO(json.dumps(data_dict).encode()) - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) - assert "Optional file not allowed" in str(err.value) + data = StringIO(json.dumps(data_dict)) + assert not site.content_manager.verifyFile(user_inner_path, data, 
ignore_same=False) del data_dict["files_optional"]["hello.exe"] # Reset # Includes not allowed in user content - data_dict["includes"] = {"other.json": {}} + data_dict["includes"] = { "other.json": { } } del data_dict["signs"] # Remove signs before signing data_dict["signs"] = { "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) } - data = io.BytesIO(json.dumps(data_dict).encode()) - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) - assert "Includes not allowed" in str(err.value) + data = StringIO(json.dumps(data_dict)) + assert not site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) def testCert(self, site): # user_addr = "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C" @@ -227,19 +154,16 @@ class TestContentUser: # Test user cert assert site.content_manager.verifyFile( "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False + StringIO(json.dumps(signed_content)), ignore_same=False ) # Test banned user cert_user_id = user_content["cert_user_id"] # My username site.content_manager.contents["data/users/content.json"]["user_contents"]["permissions"][cert_user_id] = False - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile( - "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False - ) - assert "Valid signs: 0/1" in str(err.value) - del site.content_manager.contents["data/users/content.json"]["user_contents"]["permissions"][cert_user_id] # Reset + assert not site.content_manager.verifyFile( + "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", + StringIO(json.dumps(signed_content)), ignore_same=False + ) # Test invalid cert user_content["cert_sign"] = CryptBitcoin.sign( @@ -248,143 +172,7 @@ class TestContentUser: signed_content = site.content_manager.sign( 
"data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False ) - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile( - "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False - ) - assert "Invalid cert" in str(err.value) - - # Test banned user, signed by the site owner - user_content["cert_sign"] = CryptBitcoin.sign("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C#%s/%s" % ( - user_content["cert_auth_type"], - user_content["cert_user_id"].split("@")[0] - ), cert_priv) - cert_user_id = user_content["cert_user_id"] # My username - site.content_manager.contents["data/users/content.json"]["user_contents"]["permissions"][cert_user_id] = False - - site_privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" # For 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT - del user_content["signs"] # Remove signs before signing - user_content["signs"] = { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(user_content, sort_keys=True), site_privatekey) - } - assert site.content_manager.verifyFile( + assert not site.content_manager.verifyFile( "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - io.BytesIO(json.dumps(user_content).encode()), ignore_same=False + StringIO(json.dumps(signed_content)), ignore_same=False ) - - def testMissingCert(self, site): - user_priv = "5Kk7FSA63FC2ViKmKLuBxk9gQkaQ5713hKq8LmFAf4cVeXh6K6A" - cert_priv = "5JusJDSjHaMHwUjDT3o6eQ54pA6poo8La5fAgn1wNc3iK59jxjA" - - user_content = site.content_manager.contents["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"] - rules_content = site.content_manager.contents["data/users/content.json"] - - # Override valid cert signers for the test - rules_content["user_contents"]["cert_signers"]["zeroid.bit"] = [ - "14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet", - "1iD5ZQJMNXu43w1qLB8sfdHVKppVMduGz" - ] - - # Sign a valid cert - user_content["cert_sign"] = 
CryptBitcoin.sign("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C#%s/%s" % ( - user_content["cert_auth_type"], - user_content["cert_user_id"].split("@")[0] - ), cert_priv) - signed_content = site.content_manager.sign( - "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False - ) - - assert site.content_manager.verifyFile( - "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False - ) - - # Test invalid cert_user_id - user_content["cert_user_id"] = "nodomain" - user_content["signs"] = { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(user_content, sort_keys=True), user_priv) - } - signed_content = site.content_manager.sign( - "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False - ) - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile( - "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False - ) - assert "Invalid domain in cert_user_id" in str(err.value) - - # Test removed cert - del user_content["cert_user_id"] - del user_content["cert_auth_type"] - del user_content["signs"] # Remove signs before signing - user_content["signs"] = { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(user_content, sort_keys=True), user_priv) - } - signed_content = site.content_manager.sign( - "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False - ) - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile( - "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False - ) - assert "Missing cert_user_id" in str(err.value) - - - def testCertSignersPattern(self, site): - user_priv = "5Kk7FSA63FC2ViKmKLuBxk9gQkaQ5713hKq8LmFAf4cVeXh6K6A" - cert_priv = "5JusJDSjHaMHwUjDT3o6eQ54pA6poo8La5fAgn1wNc3iK59jxjA" # 
For 14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet - - user_content = site.content_manager.contents["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"] - rules_content = site.content_manager.contents["data/users/content.json"] - - # Override valid cert signers for the test - rules_content["user_contents"]["cert_signers_pattern"] = "14wgQ[0-9][A-Z]" - - # Sign a valid cert - user_content["cert_user_id"] = "certuser@14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet" - user_content["cert_sign"] = CryptBitcoin.sign("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C#%s/%s" % ( - user_content["cert_auth_type"], - "certuser" - ), cert_priv) - signed_content = site.content_manager.sign( - "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False - ) - - assert site.content_manager.verifyFile( - "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False - ) - - # Cert does not matches the pattern - rules_content["user_contents"]["cert_signers_pattern"] = "14wgX[0-9][A-Z]" - - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile( - "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False - ) - assert "Invalid cert signer: 14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet" in str(err.value) - - # Removed cert_signers_pattern - del rules_content["user_contents"]["cert_signers_pattern"] - - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile( - "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False - ) - assert "Invalid cert signer: 14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet" in str(err.value) - - - def testNewFile(self, site): - privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" # For 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT - inner_path = "data/users/1NEWrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json" - - 
site.storage.writeJson(inner_path, {"test": "data"}) - site.content_manager.sign(inner_path, privatekey) - assert "test" in site.storage.loadJson(inner_path) - - site.storage.delete(inner_path) diff --git a/src/Test/TestCryptBitcoin.py b/src/Test/TestCryptBitcoin.py index 2bc087b5..bebf906e 100644 --- a/src/Test/TestCryptBitcoin.py +++ b/src/Test/TestCryptBitcoin.py @@ -2,37 +2,46 @@ from Crypt import CryptBitcoin class TestCryptBitcoin: - def testSign(self, crypt_bitcoin_lib): + def testSignOld(self): + privatekey = "23DKQpDz7bXM7w5KN5Wnmz7bwRNqNHcdQjb2WwrdB1QtTf5gM3pFdf" + privatekey_bad = "23DKQpDz7bXM7w5KN5Wnmz6bwRNqNHcdQjb2WwrdB1QtTf5gM3pFdf" + + # Get address by privatekey + address = CryptBitcoin.privatekeyToAddress(privatekey) + assert address == "12vTsjscg4hYPewUL2onma5pgQmWPMs3ez" + + address_bad = CryptBitcoin.privatekeyToAddress(privatekey_bad) + assert not address_bad == "12vTsjscg4hYPewUL2onma5pgQmWPMs3ez" + + # Text signing + sign = CryptBitcoin.signOld("hello", privatekey) + assert CryptBitcoin.verify("hello", address, sign) # Original text + assert not CryptBitcoin.verify("not hello", address, sign) # Different text + + # Signed by bad privatekey + sign_bad = CryptBitcoin.signOld("hello", privatekey_bad) + assert not CryptBitcoin.verify("hello", address, sign_bad) + + def testSign(self): privatekey = "5K9S6dVpufGnroRgFrT6wsKiz2mJRYsC73eWDmajaHserAp3F1C" privatekey_bad = "5Jbm9rrusXyApAoM8YoM4Rja337zMMoBUMRJ1uijiguU2aZRnwC" # Get address by privatekey - address = crypt_bitcoin_lib.privatekeyToAddress(privatekey) + address = CryptBitcoin.privatekeyToAddress(privatekey) assert address == "1MpDMxFeDUkiHohxx9tbGLeEGEuR4ZNsJz" - address_bad = crypt_bitcoin_lib.privatekeyToAddress(privatekey_bad) + address_bad = CryptBitcoin.privatekeyToAddress(privatekey_bad) assert address_bad != "1MpDMxFeDUkiHohxx9tbGLeEGEuR4ZNsJz" # Text signing - data_len_list = list(range(0, 300, 10)) - data_len_list += [1024, 2048, 1024 * 128, 1024 * 1024, 1024 * 2048] - for 
data_len in data_len_list: - data = data_len * "!" - sign = crypt_bitcoin_lib.sign(data, privatekey) + sign = CryptBitcoin.sign("hello", privatekey) - assert crypt_bitcoin_lib.verify(data, address, sign) - assert not crypt_bitcoin_lib.verify("invalid" + data, address, sign) + assert CryptBitcoin.verify("hello", address, sign) + assert not CryptBitcoin.verify("not hello", address, sign) # Signed by bad privatekey - sign_bad = crypt_bitcoin_lib.sign("hello", privatekey_bad) - assert not crypt_bitcoin_lib.verify("hello", address, sign_bad) - - def testVerify(self, crypt_bitcoin_lib): - sign_uncompressed = b'G6YkcFTuwKMVMHI2yycGQIFGbCZVNsZEZvSlOhKpHUt/BlADY94egmDAWdlrbbFrP9wH4aKcEfbLO8sa6f63VU0=' - assert crypt_bitcoin_lib.verify("1NQUem2M4cAqWua6BVFBADtcSP55P4QobM#web/gitcenter", "19Bir5zRm1yo4pw9uuxQL8xwf9b7jqMpR", sign_uncompressed) - - sign_compressed = b'H6YkcFTuwKMVMHI2yycGQIFGbCZVNsZEZvSlOhKpHUt/BlADY94egmDAWdlrbbFrP9wH4aKcEfbLO8sa6f63VU0=' - assert crypt_bitcoin_lib.verify("1NQUem2M4cAqWua6BVFBADtcSP55P4QobM#web/gitcenter", "1KH5BdNnqxh2KRWMMT8wUXzUgz4vVQ4S8p", sign_compressed) + sign_bad = CryptBitcoin.sign("hello", privatekey_bad) + assert not CryptBitcoin.verify("hello", address, sign_bad) def testNewPrivatekey(self): assert CryptBitcoin.newPrivatekey() != CryptBitcoin.newPrivatekey() diff --git a/src/Test/TestCryptHash.py b/src/Test/TestCryptHash.py deleted file mode 100644 index b91dbcca..00000000 --- a/src/Test/TestCryptHash.py +++ /dev/null @@ -1,31 +0,0 @@ -import base64 - -from Crypt import CryptHash - -sha512t_sum_hex = "2e9466d8aa1f340c91203b4ddbe9b6669879616a1b8e9571058a74195937598d" -sha512t_sum_bin = b".\x94f\xd8\xaa\x1f4\x0c\x91 ;M\xdb\xe9\xb6f\x98yaj\x1b\x8e\x95q\x05\x8at\x19Y7Y\x8d" -sha256_sum_hex = "340cd04be7f530e3a7c1bc7b24f225ba5762ec7063a56e1ae01a30d56722e5c3" - - -class TestCryptBitcoin: - - def testSha(self, site): - file_path = site.storage.getPath("dbschema.json") - assert CryptHash.sha512sum(file_path) == sha512t_sum_hex - assert 
CryptHash.sha512sum(open(file_path, "rb")) == sha512t_sum_hex - assert CryptHash.sha512sum(open(file_path, "rb"), format="digest") == sha512t_sum_bin - - assert CryptHash.sha256sum(file_path) == sha256_sum_hex - assert CryptHash.sha256sum(open(file_path, "rb")) == sha256_sum_hex - - with open(file_path, "rb") as f: - hash = CryptHash.Sha512t(f.read(100)) - hash.hexdigest() != sha512t_sum_hex - hash.update(f.read(1024 * 1024)) - assert hash.hexdigest() == sha512t_sum_hex - - def testRandom(self): - assert len(CryptHash.random(64)) == 64 - assert CryptHash.random() != CryptHash.random() - assert bytes.fromhex(CryptHash.random(encoding="hex")) - assert base64.b64decode(CryptHash.random(encoding="base64")) diff --git a/src/Test/TestDb.py b/src/Test/TestDb.py index 67f383a3..55ae103d 100644 --- a/src/Test/TestDb.py +++ b/src/Test/TestDb.py @@ -1,8 +1,44 @@ -import io +import os + +from Config import config +from Db import Db class TestDb: - def testCheckTables(self, db): + def testCheckTables(self): + db_path = "%s/zeronet.db" % config.data_dir + schema = { + "db_name": "TestDb", + "db_file": "%s/zeronet.db" % config.data_dir, + "map": { + "data.json": { + "to_table": { + "test": "test" + } + } + }, + "tables": { + "test": { + "cols": [ + ["test_id", "INTEGER"], + ["title", "TEXT"], + ], + "indexes": ["CREATE UNIQUE INDEX test_id ON test(test_id)"], + "schema_changed": 1426195822 + } + } + } + + if os.path.isfile(db_path): + os.unlink(db_path) + db = Db(schema, db_path) + db.checkTables() + db.close() + + # Verify tables + assert os.path.isfile(db_path) + db = Db(schema, db_path) + tables = [row["name"] for row in db.execute("SELECT name FROM sqlite_master WHERE type='table'")] assert "keyvalue" in tables # To store simple key -> value assert "json" in tables # Json file path registry @@ -28,7 +64,40 @@ class TestDb: assert "test" in tables assert "newtest" in tables - def testQueries(self, db): + db.close() + + # Cleanup + os.unlink(db_path) + + def testQueries(self): 
+ db_path = "%s/zeronet.db" % config.data_dir + schema = { + "db_name": "TestDb", + "db_file": "%s/zeronet.db" % config.data_dir, + "map": { + "data.json": { + "to_table": { + "test": "test" + } + } + }, + "tables": { + "test": { + "cols": [ + ["test_id", "INTEGER"], + ["title", "TEXT"], + ], + "indexes": ["CREATE UNIQUE INDEX test_id ON test(test_id)"], + "schema_changed": 1426195822 + } + } + } + + if os.path.isfile(db_path): + os.unlink(db_path) + db = Db(schema, db_path) + db.checkTables() + # Test insert for i in range(100): db.execute("INSERT INTO test ?", {"test_id": i, "title": "Test #%s" % i}) @@ -39,99 +108,11 @@ class TestDb: assert db.execute("SELECT COUNT(*) AS num FROM test WHERE ?", {"test_id": 1}).fetchone()["num"] == 1 # Test multiple select - assert db.execute("SELECT COUNT(*) AS num FROM test WHERE ?", {"test_id": [1, 2, 3]}).fetchone()["num"] == 3 - assert db.execute( - "SELECT COUNT(*) AS num FROM test WHERE ?", - {"test_id": [1, 2, 3], "title": "Test #2"} - ).fetchone()["num"] == 1 - assert db.execute( - "SELECT COUNT(*) AS num FROM test WHERE ?", - {"test_id": [1, 2, 3], "title": ["Test #2", "Test #3", "Test #4"]} - ).fetchone()["num"] == 2 + assert db.execute("SELECT COUNT(*) AS num FROM test WHERE ?", {"test_id": [1,2,3]}).fetchone()["num"] == 3 + assert db.execute("SELECT COUNT(*) AS num FROM test WHERE ?", {"test_id": [1,2,3], "title": "Test #2"}).fetchone()["num"] == 1 + assert db.execute("SELECT COUNT(*) AS num FROM test WHERE ?", {"test_id": [1,2,3], "title": ["Test #2", "Test #3", "Test #4"]}).fetchone()["num"] == 2 - # Test multiple select using named params - assert db.execute("SELECT COUNT(*) AS num FROM test WHERE test_id IN :test_id", {"test_id": [1, 2, 3]}).fetchone()["num"] == 3 - assert db.execute( - "SELECT COUNT(*) AS num FROM test WHERE test_id IN :test_id AND title = :title", - {"test_id": [1, 2, 3], "title": "Test #2"} - ).fetchone()["num"] == 1 - assert db.execute( - "SELECT COUNT(*) AS num FROM test WHERE test_id IN 
:test_id AND title IN :title", - {"test_id": [1, 2, 3], "title": ["Test #2", "Test #3", "Test #4"]} - ).fetchone()["num"] == 2 + db.close() - # Large ammount of IN values - assert db.execute( - "SELECT COUNT(*) AS num FROM test WHERE ?", - {"not__test_id": list(range(2, 3000))} - ).fetchone()["num"] == 2 - assert db.execute( - "SELECT COUNT(*) AS num FROM test WHERE ?", - {"test_id": list(range(50, 3000))} - ).fetchone()["num"] == 50 - - assert db.execute( - "SELECT COUNT(*) AS num FROM test WHERE ?", - {"not__title": ["Test #%s" % i for i in range(50, 3000)]} - ).fetchone()["num"] == 50 - - assert db.execute( - "SELECT COUNT(*) AS num FROM test WHERE ?", - {"title__like": "%20%"} - ).fetchone()["num"] == 1 - - # Test named parameter escaping - assert db.execute( - "SELECT COUNT(*) AS num FROM test WHERE test_id = :test_id AND title LIKE :titlelike", - {"test_id": 1, "titlelike": "Test%"} - ).fetchone()["num"] == 1 - - def testEscaping(self, db): - # Test insert - for i in range(100): - db.execute("INSERT INTO test ?", {"test_id": i, "title": "Test '\" #%s" % i}) - - assert db.execute( - "SELECT COUNT(*) AS num FROM test WHERE ?", - {"title": "Test '\" #1"} - ).fetchone()["num"] == 1 - - assert db.execute( - "SELECT COUNT(*) AS num FROM test WHERE ?", - {"title": ["Test '\" #%s" % i for i in range(0, 50)]} - ).fetchone()["num"] == 50 - - assert db.execute( - "SELECT COUNT(*) AS num FROM test WHERE ?", - {"not__title": ["Test '\" #%s" % i for i in range(50, 3000)]} - ).fetchone()["num"] == 50 - - - def testUpdateJson(self, db): - f = io.BytesIO() - f.write(""" - { - "test": [ - {"test_id": 1, "title": "Test 1 title", "extra col": "Ignore it"} - ] - } - """.encode()) - f.seek(0) - assert db.updateJson(db.db_dir + "data.json", f) is True - assert db.execute("SELECT COUNT(*) AS num FROM test_importfilter").fetchone()["num"] == 1 - assert db.execute("SELECT COUNT(*) AS num FROM test").fetchone()["num"] == 1 - - def testUnsafePattern(self, db): - db.schema["maps"] = 
{"[A-Za-z.]*": db.schema["maps"]["data.json"]} # Only repetition of . supported - f = io.StringIO() - f.write(""" - { - "test": [ - {"test_id": 1, "title": "Test 1 title", "extra col": "Ignore it"} - ] - } - """) - f.seek(0) - assert db.updateJson(db.db_dir + "data.json", f) is False - assert db.execute("SELECT COUNT(*) AS num FROM test_importfilter").fetchone()["num"] == 0 - assert db.execute("SELECT COUNT(*) AS num FROM test").fetchone()["num"] == 0 + # Cleanup + os.unlink(db_path) diff --git a/src/Test/TestDbQuery.py b/src/Test/TestDbQuery.py deleted file mode 100644 index 597bc950..00000000 --- a/src/Test/TestDbQuery.py +++ /dev/null @@ -1,31 +0,0 @@ -import re - -from Db.DbQuery import DbQuery - - -class TestDbQuery: - def testParse(self): - query_text = """ - SELECT - 'comment' AS type, - date_added, post.title AS title, - keyvalue.value || ': ' || comment.body AS body, - '?Post:' || comment.post_id || '#Comments' AS url - FROM - comment - LEFT JOIN json USING (json_id) - LEFT JOIN json AS json_content ON (json_content.directory = json.directory AND json_content.file_name='content.json') - LEFT JOIN keyvalue ON (keyvalue.json_id = json_content.json_id AND key = 'cert_user_id') - LEFT JOIN post ON (comment.post_id = post.post_id) - WHERE - post.date_added > 123 - ORDER BY - date_added DESC - LIMIT 20 - """ - query = DbQuery(query_text) - assert query.parts["LIMIT"] == "20" - assert query.fields["body"] == "keyvalue.value || ': ' || comment.body" - assert re.sub("[ \r\n]", "", str(query)) == re.sub("[ \r\n]", "", query_text) - query.wheres.append("body LIKE '%hello%'") - assert "body LIKE '%hello%'" in str(query) diff --git a/src/Test/TestDebug.py b/src/Test/TestDebug.py deleted file mode 100644 index e3eb20b3..00000000 --- a/src/Test/TestDebug.py +++ /dev/null @@ -1,52 +0,0 @@ -from Debug import Debug -import gevent -import os -import re - -import pytest - - -class TestDebug: - @pytest.mark.parametrize("items,expected", [ - (["@/src/A/B/C.py:17"], ["A/B/C.py 
line 17"]), # basic test - (["@/src/Db/Db.py:17"], ["Db.py line 17"]), # path compression - (["%s:1" % __file__], ["TestDebug.py line 1"]), - (["@/plugins/Chart/ChartDb.py:100"], ["ChartDb.py line 100"]), # plugins - (["@/main.py:17"], ["main.py line 17"]), # root - (["@\\src\\Db\\__init__.py:17"], ["Db/__init__.py line 17"]), # Windows paths - ([":1"], []), # importlib builtins - ([":1"], []), # importlib builtins - (["/home/ivanq/ZeroNet/src/main.py:13"], ["?/src/main.py line 13"]), # best-effort anonymization - (["C:\\ZeroNet\\core\\src\\main.py:13"], ["?/src/main.py line 13"]), - (["/root/main.py:17"], ["/root/main.py line 17"]), - (["{gevent}:13"], ["/__init__.py line 13"]), # modules - (["{os}:13"], [" line 13"]), # python builtin modules - (["src/gevent/event.py:17"], ["/event.py line 17"]), # gevent-overriden __file__ - (["@/src/Db/Db.py:17", "@/src/Db/DbQuery.py:1"], ["Db.py line 17", "DbQuery.py line 1"]), # mutliple args - (["@/src/Db/Db.py:17", "@/src/Db/Db.py:1"], ["Db.py line 17", "1"]), # same file - (["{os}:1", "@/src/Db/Db.py:17"], [" line 1", "Db.py line 17"]), # builtins - (["{gevent}:1"] + ["{os}:3"] * 4 + ["@/src/Db/Db.py:17"], ["/__init__.py line 1", "...", "Db.py line 17"]) - ]) - def testFormatTraceback(self, items, expected): - q_items = [] - for item in items: - file, line = item.rsplit(":", 1) - if file.startswith("@"): - file = Debug.root_dir + file[1:] - file = file.replace("{os}", os.__file__) - file = file.replace("{gevent}", gevent.__file__) - q_items.append((file, int(line))) - assert Debug.formatTraceback(q_items) == expected - - def testFormatException(self): - try: - raise ValueError("Test exception") - except Exception: - assert re.match(r"ValueError: Test exception in TestDebug.py line [0-9]+", Debug.formatException()) - try: - os.path.abspath(1) - except Exception: - assert re.search(r"in TestDebug.py line [0-9]+ > <(posixpath|ntpath)> line ", Debug.formatException()) - - def testFormatStack(self): - assert 
re.match(r"TestDebug.py line [0-9]+ > <_pytest>/python.py line [0-9]+", Debug.formatStack()) diff --git a/src/Test/TestDiff.py b/src/Test/TestDiff.py deleted file mode 100644 index 622951a1..00000000 --- a/src/Test/TestDiff.py +++ /dev/null @@ -1,58 +0,0 @@ -import io - -from util import Diff - - -class TestDiff: - def testDiff(self): - assert Diff.diff( - [], - ["one", "two", "three"] - ) == [("+", ["one", "two","three"])] - - assert Diff.diff( - ["one", "two", "three"], - ["one", "two", "three", "four", "five"] - ) == [("=", 11), ("+", ["four", "five"])] - - assert Diff.diff( - ["one", "two", "three", "six"], - ["one", "two", "three", "four", "five", "six"] - ) == [("=", 11), ("+", ["four", "five"]), ("=", 3)] - - assert Diff.diff( - ["one", "two", "three", "hmm", "six"], - ["one", "two", "three", "four", "five", "six"] - ) == [("=", 11), ("-", 3), ("+", ["four", "five"]), ("=", 3)] - - assert Diff.diff( - ["one", "two", "three"], - [] - ) == [("-", 11)] - - def testUtf8(self): - assert Diff.diff( - ["one", "\xe5\xad\xa6\xe4\xb9\xa0\xe4\xb8\x8b", "two", "three"], - ["one", "\xe5\xad\xa6\xe4\xb9\xa0\xe4\xb8\x8b", "two", "three", "four", "five"] - ) == [("=", 20), ("+", ["four", "five"])] - - def testDiffLimit(self): - old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix") - new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix") - actions = Diff.diff(list(old_f), list(new_f), limit=1024) - assert actions - - old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix") - new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix"*1024) - actions = Diff.diff(list(old_f), list(new_f), limit=1024) - assert actions is False - - def testPatch(self): - old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix") - new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix") - actions = Diff.diff( - list(old_f), - list(new_f) - ) - old_f.seek(0) - assert Diff.patch(old_f, actions).getvalue() == new_f.getvalue() diff --git a/src/Test/TestFileRequest.py b/src/Test/TestFileRequest.py index 3fabc271..e53c83dc 
100644 --- a/src/Test/TestFileRequest.py +++ b/src/Test/TestFileRequest.py @@ -1,4 +1,4 @@ -import io +import cStringIO as StringIO import pytest import time @@ -13,21 +13,17 @@ from File import FileServer class TestFileRequest: def testGetFile(self, file_server, site): file_server.ip_incoming = {} # Reset flood protection - client = ConnectionServer(file_server.ip, 1545) + client = ConnectionServer("127.0.0.1", 1545) - connection = client.getConnection(file_server.ip, 1544) + connection = client.getConnection("127.0.0.1", 1544) file_server.sites[site.address] = site - # Normal request response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0}) - assert b"sign" in response["body"] - - response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0, "file_size": site.storage.getSize("content.json")}) - assert b"sign" in response["body"] + assert "sign" in response["body"] # Invalid file response = connection.request("getFile", {"site": site.address, "inner_path": "invalid.file", "location": 0}) - assert "File read error" in response["error"] + assert "No such file or directory" in response["error"] # Location over size response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 1024 * 1024}) @@ -35,90 +31,62 @@ class TestFileRequest: # Stream from parent dir response = connection.request("getFile", {"site": site.address, "inner_path": "../users.json", "location": 0}) - assert "File read exception" in response["error"] - - # Invalid site - response = connection.request("getFile", {"site": "", "inner_path": "users.json", "location": 0}) - assert "Unknown site" in response["error"] - - response = connection.request("getFile", {"site": ".", "inner_path": "users.json", "location": 0}) - assert "Unknown site" in response["error"] - - # Invalid size - response = connection.request("getFile", {"site": site.address, "inner_path": 
"content.json", "location": 0, "file_size": 1234}) - assert "File size does not match" in response["error"] - - # Invalid path - for path in ["../users.json", "./../users.json", "data/../content.json", ".../users.json"]: - for sep in ["/", "\\"]: - response = connection.request("getFile", {"site": site.address, "inner_path": path.replace("/", sep), "location": 0}) - assert response["error"] == 'File read exception' + assert "File not allowed" in response["error"] connection.close() client.stop() def testStreamFile(self, file_server, site): file_server.ip_incoming = {} # Reset flood protection - client = ConnectionServer(file_server.ip, 1545) - connection = client.getConnection(file_server.ip, 1544) + client = ConnectionServer("127.0.0.1", 1545) + connection = client.getConnection("127.0.0.1", 1544) file_server.sites[site.address] = site - buff = io.BytesIO() + buff = StringIO.StringIO() response = connection.request("streamFile", {"site": site.address, "inner_path": "content.json", "location": 0}, buff) assert "stream_bytes" in response - assert b"sign" in buff.getvalue() + assert "sign" in buff.getvalue() # Invalid file - buff = io.BytesIO() + buff = StringIO.StringIO() response = connection.request("streamFile", {"site": site.address, "inner_path": "invalid.file", "location": 0}, buff) - assert "File read error" in response["error"] + assert "No such file or directory" in response["error"] # Location over size - buff = io.BytesIO() + buff = StringIO.StringIO() response = connection.request( "streamFile", {"site": site.address, "inner_path": "content.json", "location": 1024 * 1024}, buff ) assert "File read error" in response["error"] # Stream from parent dir - buff = io.BytesIO() + buff = StringIO.StringIO() response = connection.request("streamFile", {"site": site.address, "inner_path": "../users.json", "location": 0}, buff) - assert "File read exception" in response["error"] + assert "File not allowed" in response["error"] connection.close() client.stop() def 
testPex(self, file_server, site, site_temp): file_server.sites[site.address] = site - client = FileServer(file_server.ip, 1545) - client.sites = {site_temp.address: site_temp} + client = FileServer("127.0.0.1", 1545) + client.sites[site_temp.address] = site_temp site_temp.connection_server = client - connection = client.getConnection(file_server.ip, 1544) + connection = client.getConnection("127.0.0.1", 1544) # Add new fake peer to site - fake_peer = site.addPeer(file_server.ip_external, 11337, return_peer=True) + fake_peer = site.addPeer("1.2.3.4", 11337, return_peer=True) # Add fake connection to it - fake_peer.connection = Connection(file_server, file_server.ip_external, 11337) + fake_peer.connection = Connection(file_server, "1.2.3.4", 11337) fake_peer.connection.last_recv_time = time.time() assert fake_peer in site.getConnectablePeers() # Add file_server as peer to client - peer_file_server = site_temp.addPeer(file_server.ip, 1544) + peer_file_server = site_temp.addPeer("127.0.0.1", 1544) - assert "%s:11337" % file_server.ip_external not in site_temp.peers + assert "1.2.3.4:11337" not in site_temp.peers assert peer_file_server.pex() - assert "%s:11337" % file_server.ip_external in site_temp.peers - - # Should not exchange private peers from local network - fake_peer_private = site.addPeer("192.168.0.1", 11337, return_peer=True) - assert fake_peer_private not in site.getConnectablePeers(allow_private=False) - fake_peer_private.connection = Connection(file_server, "192.168.0.1", 11337) - fake_peer_private.connection.last_recv_time = time.time() - - assert "192.168.0.1:11337" not in site_temp.peers - assert not peer_file_server.pex() - assert "192.168.0.1:11337" not in site_temp.peers - + assert "1.2.3.4:11337" in site_temp.peers connection.close() client.stop() diff --git a/src/Test/TestFlag.py b/src/Test/TestFlag.py deleted file mode 100644 index 12fd8165..00000000 --- a/src/Test/TestFlag.py +++ /dev/null @@ -1,39 +0,0 @@ -import os - -import pytest - -from 
util.Flag import Flag - -class TestFlag: - def testFlagging(self): - flag = Flag() - @flag.admin - @flag.no_multiuser - def testFn(anything): - return anything - - assert "admin" in flag.db["testFn"] - assert "no_multiuser" in flag.db["testFn"] - - def testSubclassedFlagging(self): - flag = Flag() - class Test: - @flag.admin - @flag.no_multiuser - def testFn(anything): - return anything - - class SubTest(Test): - pass - - assert "admin" in flag.db["testFn"] - assert "no_multiuser" in flag.db["testFn"] - - def testInvalidFlag(self): - flag = Flag() - with pytest.raises(Exception) as err: - @flag.no_multiuser - @flag.unknown_flag - def testFn(anything): - return anything - assert "Invalid flag" in str(err.value) diff --git a/src/Test/TestHelper.py b/src/Test/TestHelper.py index 07644ec0..3b4a196f 100644 --- a/src/Test/TestHelper.py +++ b/src/Test/TestHelper.py @@ -1,10 +1,7 @@ import socket -import struct -import os import pytest from util import helper -from Config import config @pytest.mark.usefixtures("resetSettings") @@ -15,25 +12,13 @@ class TestHelper: assert helper.shellquote("hel'lo", 'hel"lo') == ('"hel\'lo"', '"hello"') def testPackAddress(self): - for port in [1, 1000, 65535]: - for ip in ["1.1.1.1", "127.0.0.1", "0.0.0.0", "255.255.255.255", "192.168.1.1"]: - assert len(helper.packAddress(ip, port)) == 6 - assert helper.unpackAddress(helper.packAddress(ip, port)) == (ip, port) - - for ip in ["1:2:3:4:5:6:7:8", "::1", "2001:19f0:6c01:e76:5400:1ff:fed6:3eca", "2001:4860:4860::8888"]: - assert len(helper.packAddress(ip, port)) == 18 - assert helper.unpackAddress(helper.packAddress(ip, port)) == (ip, port) - - assert len(helper.packOnionAddress("boot3rdez4rzn36x.onion", port)) == 12 - assert helper.unpackOnionAddress(helper.packOnionAddress("boot3rdez4rzn36x.onion", port)) == ("boot3rdez4rzn36x.onion", port) - - with pytest.raises(struct.error): - helper.packAddress("1.1.1.1", 100000) + assert len(helper.packAddress("1.1.1.1", 1)) == 6 + assert 
helper.unpackAddress(helper.packAddress("1.1.1.1", 1)) == ("1.1.1.1", 1) with pytest.raises(socket.error): helper.packAddress("999.1.1.1", 1) - with pytest.raises(Exception): + with pytest.raises(socket.error): helper.unpackAddress("X") def testGetDirname(self): @@ -42,7 +27,8 @@ class TestHelper: assert helper.getDirname("") == "" assert helper.getDirname("content.json") == "" assert helper.getDirname("data/users/") == "data/users/" - assert helper.getDirname("/data/users/content.json") == "data/users/" + assert helper.getDirname("/data/users/content.json") == "/data/users/" + def testGetFilename(self): assert helper.getFilename("data/users/content.json") == "content.json" @@ -50,30 +36,4 @@ class TestHelper: assert helper.getFilename("") == "" assert helper.getFilename("content.json") == "content.json" assert helper.getFilename("data/users/") == "" - assert helper.getFilename("/data/users/content.json") == "content.json" - - def testIsIp(self): - assert helper.isIp("1.2.3.4") - assert helper.isIp("255.255.255.255") - assert not helper.isIp("any.host") - assert not helper.isIp("1.2.3.4.com") - assert not helper.isIp("1.2.3.4.any.host") - - def testIsPrivateIp(self): - assert helper.isPrivateIp("192.168.1.1") - assert not helper.isPrivateIp("1.1.1.1") - assert helper.isPrivateIp("fe80::44f0:3d0:4e6:637c") - assert not helper.isPrivateIp("fca5:95d6:bfde:d902:8951:276e:1111:a22c") # cjdns - - def testOpenLocked(self): - locked_f = helper.openLocked(config.data_dir + "/locked.file") - assert locked_f - with pytest.raises(BlockingIOError): - locked_f_again = helper.openLocked(config.data_dir + "/locked.file") - locked_f_different = helper.openLocked(config.data_dir + "/locked_different.file") - - locked_f.close() - locked_f_different.close() - - os.unlink(locked_f.name) - os.unlink(locked_f_different.name) + assert helper.getFilename("/data/users/content.json") == "content.json" \ No newline at end of file diff --git a/src/Test/TestMsgpack.py b/src/Test/TestMsgpack.py 
deleted file mode 100644 index 5a0b6d4d..00000000 --- a/src/Test/TestMsgpack.py +++ /dev/null @@ -1,88 +0,0 @@ -import io -import os - -import msgpack -import pytest - -from Config import config -from util import Msgpack -from collections import OrderedDict - - -class TestMsgpack: - test_data = OrderedDict( - sorted({"cmd": "fileGet", "bin": b'p\x81zDhL\xf0O\xd0\xaf', "params": {"site": "1Site"}, "utf8": b'\xc3\xa1rv\xc3\xadzt\xc5\xb1r\xc5\x91'.decode("utf8"), "list": [b'p\x81zDhL\xf0O\xd0\xaf', b'p\x81zDhL\xf0O\xd0\xaf']}.items()) - ) - - def testPacking(self): - assert Msgpack.pack(self.test_data) == b'\x85\xa3bin\xc4\np\x81zDhL\xf0O\xd0\xaf\xa3cmd\xa7fileGet\xa4list\x92\xc4\np\x81zDhL\xf0O\xd0\xaf\xc4\np\x81zDhL\xf0O\xd0\xaf\xa6params\x81\xa4site\xa51Site\xa4utf8\xad\xc3\xa1rv\xc3\xadzt\xc5\xb1r\xc5\x91' - assert Msgpack.pack(self.test_data, use_bin_type=False) == b'\x85\xa3bin\xaap\x81zDhL\xf0O\xd0\xaf\xa3cmd\xa7fileGet\xa4list\x92\xaap\x81zDhL\xf0O\xd0\xaf\xaap\x81zDhL\xf0O\xd0\xaf\xa6params\x81\xa4site\xa51Site\xa4utf8\xad\xc3\xa1rv\xc3\xadzt\xc5\xb1r\xc5\x91' - - def testUnpackinkg(self): - assert Msgpack.unpack(Msgpack.pack(self.test_data)) == self.test_data - - @pytest.mark.parametrize("unpacker_class", [msgpack.Unpacker, msgpack.fallback.Unpacker]) - def testUnpacker(self, unpacker_class): - unpacker = unpacker_class(raw=False) - - data = msgpack.packb(self.test_data, use_bin_type=True) - data += msgpack.packb(self.test_data, use_bin_type=True) - - messages = [] - for char in data: - unpacker.feed(bytes([char])) - for message in unpacker: - messages.append(message) - - assert len(messages) == 2 - assert messages[0] == self.test_data - assert messages[0] == messages[1] - - def testStreaming(self): - bin_data = os.urandom(20) - f = Msgpack.FilePart("%s/users.json" % config.data_dir, "rb") - f.read_bytes = 30 - - data = {"cmd": "response", "body": f, "bin": bin_data} - - out_buff = io.BytesIO() - Msgpack.stream(data, out_buff.write) - out_buff.seek(0) - - 
data_packb = { - "cmd": "response", - "body": open("%s/users.json" % config.data_dir, "rb").read(30), - "bin": bin_data - } - - out_buff.seek(0) - data_unpacked = Msgpack.unpack(out_buff.read()) - assert data_unpacked == data_packb - assert data_unpacked["cmd"] == "response" - assert type(data_unpacked["body"]) == bytes - - def testBackwardCompatibility(self): - packed = {} - packed["py3"] = Msgpack.pack(self.test_data, use_bin_type=False) - packed["py3_bin"] = Msgpack.pack(self.test_data, use_bin_type=True) - for key, val in packed.items(): - unpacked = Msgpack.unpack(val) - type(unpacked["utf8"]) == str - type(unpacked["bin"]) == bytes - - # Packed with use_bin_type=False (pre-ZeroNet 0.7.0) - unpacked = Msgpack.unpack(packed["py3"], decode=True) - type(unpacked["utf8"]) == str - type(unpacked["bin"]) == bytes - assert len(unpacked["utf8"]) == 9 - assert len(unpacked["bin"]) == 10 - with pytest.raises(UnicodeDecodeError) as err: # Try to decode binary as utf-8 - unpacked = Msgpack.unpack(packed["py3"], decode=False) - - # Packed with use_bin_type=True - unpacked = Msgpack.unpack(packed["py3_bin"], decode=False) - type(unpacked["utf8"]) == str - type(unpacked["bin"]) == bytes - assert len(unpacked["utf8"]) == 9 - assert len(unpacked["bin"]) == 10 - diff --git a/src/Test/TestNoparallel.py b/src/Test/TestNoparallel.py index 6fc4f57d..abc4c767 100644 --- a/src/Test/TestNoparallel.py +++ b/src/Test/TestNoparallel.py @@ -1,20 +1,10 @@ import time import gevent -import pytest +from gevent import monkey +monkey.patch_all() import util -from util import ThreadPool - - -@pytest.fixture(params=['gevent.spawn', 'thread_pool.spawn']) -def queue_spawn(request): - thread_pool = ThreadPool.ThreadPool(10) - if request.param == "gevent.spawn": - return gevent.spawn - else: - return thread_pool.spawn - class ExampleClass(object): def __init__(self): @@ -22,41 +12,34 @@ class ExampleClass(object): @util.Noparallel() def countBlocking(self, num=5): - for i in range(1, num + 1): - 
time.sleep(0.1) - self.counted += 1 - return "counted:%s" % i - - @util.Noparallel(queue=True, ignore_class=True) - def countQueue(self, num=5): - for i in range(1, num + 1): - time.sleep(0.1) + for i in range(1, num+1): + time.sleep(0.01) self.counted += 1 return "counted:%s" % i @util.Noparallel(blocking=False) def countNoblocking(self, num=5): - for i in range(1, num + 1): + for i in range(1, num+1): time.sleep(0.01) self.counted += 1 return "counted:%s" % i class TestNoparallel: - def testBlocking(self, queue_spawn): + def testBlocking(self): obj1 = ExampleClass() obj2 = ExampleClass() # Dont allow to call again until its running and wait until its running threads = [ - queue_spawn(obj1.countBlocking), - queue_spawn(obj1.countBlocking), - queue_spawn(obj1.countBlocking), - queue_spawn(obj2.countBlocking) + gevent.spawn(obj1.countBlocking), + gevent.spawn(obj1.countBlocking), + gevent.spawn(obj1.countBlocking), + gevent.spawn(obj2.countBlocking) ] assert obj2.countBlocking() == "counted:5" # The call is ignored as obj2.countBlocking already counting, but block until its finishes gevent.joinall(threads) - assert [thread.value for thread in threads] == ["counted:5", "counted:5", "counted:5", "counted:5"] + assert [thread.value for thread in threads] == ["counted:5","counted:5","counted:5","counted:5"] # Check the return value for every call obj2.countBlocking() # Allow to call again as obj2.countBlocking finished assert obj1.counted == 5 @@ -64,6 +47,7 @@ class TestNoparallel: def testNoblocking(self): obj1 = ExampleClass() + obj2 = ExampleClass() thread1 = obj1.countNoblocking() thread2 = obj1.countNoblocking() # Ignored @@ -76,92 +60,3 @@ class TestNoparallel: obj1.countNoblocking().join() # Allow again and wait until finishes assert obj1.counted == 10 - - def testQueue(self, queue_spawn): - obj1 = ExampleClass() - - queue_spawn(obj1.countQueue, num=1) - queue_spawn(obj1.countQueue, num=1) - queue_spawn(obj1.countQueue, num=1) - - time.sleep(0.3) - assert 
obj1.counted == 2 # No multi-queue supported - - obj2 = ExampleClass() - queue_spawn(obj2.countQueue, num=10) - queue_spawn(obj2.countQueue, num=10) - - time.sleep(1.5) # Call 1 finished, call 2 still working - assert 10 < obj2.counted < 20 - - queue_spawn(obj2.countQueue, num=10) - time.sleep(2.0) - - assert obj2.counted == 30 - - def testQueueOverload(self): - obj1 = ExampleClass() - - threads = [] - for i in range(1000): - thread = gevent.spawn(obj1.countQueue, num=5) - threads.append(thread) - - gevent.joinall(threads) - assert obj1.counted == 5 * 2 # Only called twice (no multi-queue allowed) - - def testIgnoreClass(self, queue_spawn): - obj1 = ExampleClass() - obj2 = ExampleClass() - - threads = [ - queue_spawn(obj1.countQueue), - queue_spawn(obj1.countQueue), - queue_spawn(obj1.countQueue), - queue_spawn(obj2.countQueue), - queue_spawn(obj2.countQueue) - ] - s = time.time() - time.sleep(0.001) - gevent.joinall(threads) - - # Queue limited to 2 calls (every call takes counts to 5 and takes 0.05 sec) - assert obj1.counted + obj2.counted == 10 - - taken = time.time() - s - assert 1.2 > taken >= 1.0 # 2 * 0.5s count = ~1s - - def testException(self, queue_spawn): - class MyException(Exception): - pass - - @util.Noparallel() - def raiseException(): - raise MyException("Test error!") - - with pytest.raises(MyException) as err: - raiseException() - assert str(err.value) == "Test error!" - - with pytest.raises(MyException) as err: - queue_spawn(raiseException).get() - assert str(err.value) == "Test error!" 
- - def testMultithreadMix(self, queue_spawn): - obj1 = ExampleClass() - with ThreadPool.ThreadPool(10) as thread_pool: - s = time.time() - t1 = queue_spawn(obj1.countBlocking, 5) - time.sleep(0.01) - t2 = thread_pool.spawn(obj1.countBlocking, 5) - time.sleep(0.01) - t3 = thread_pool.spawn(obj1.countBlocking, 5) - time.sleep(0.3) - t4 = gevent.spawn(obj1.countBlocking, 5) - threads = [t1, t2, t3, t4] - for thread in threads: - assert thread.get() == "counted:5" - - time_taken = time.time() - s - assert obj1.counted == 5 - assert 0.5 < time_taken < 0.7 diff --git a/src/Test/TestPeer.py b/src/Test/TestPeer.py index f57e046e..1140c02f 100644 --- a/src/Test/TestPeer.py +++ b/src/Test/TestPeer.py @@ -1,26 +1,27 @@ import time -import io +from cStringIO import StringIO import pytest from File import FileServer from File import FileRequest from Crypt import CryptHash -from . import Spy +import Spy @pytest.mark.usefixtures("resetSettings") @pytest.mark.usefixtures("resetTempSettings") class TestPeer: def testPing(self, file_server, site, site_temp): + file_server.ip_incoming = {} # Reset flood protection file_server.sites[site.address] = site - client = FileServer(file_server.ip, 1545) - client.sites = {site_temp.address: site_temp} + client = FileServer("127.0.0.1", 1545) + client.sites[site_temp.address] = site_temp site_temp.connection_server = client - connection = client.getConnection(file_server.ip, 1544) + connection = client.getConnection("127.0.0.1", 1544) # Add file_server as peer to client - peer_file_server = site_temp.addPeer(file_server.ip, 1544) + peer_file_server = site_temp.addPeer("127.0.0.1", 1544) assert peer_file_server.ping() is not None @@ -32,28 +33,31 @@ class TestPeer: client.stop() def testDownloadFile(self, file_server, site, site_temp): + file_server.ip_incoming = {} # Reset flood protection file_server.sites[site.address] = site - client = FileServer(file_server.ip, 1545) - client.sites = {site_temp.address: site_temp} + client = 
FileServer("127.0.0.1", 1545) + client.sites[site_temp.address] = site_temp site_temp.connection_server = client - connection = client.getConnection(file_server.ip, 1544) + connection = client.getConnection("127.0.0.1", 1544) # Add file_server as peer to client - peer_file_server = site_temp.addPeer(file_server.ip, 1544) + peer_file_server = site_temp.addPeer("127.0.0.1", 1544) # Testing streamFile - buff = peer_file_server.getFile(site_temp.address, "content.json", streaming=True) - assert b"sign" in buff.getvalue() + buff = peer_file_server.streamFile(site_temp.address, "content.json") + assert "sign" in buff.getvalue() # Testing getFile buff = peer_file_server.getFile(site_temp.address, "content.json") - assert b"sign" in buff.getvalue() + assert "sign" in buff.getvalue() connection.close() client.stop() def testHashfield(self, site): - sample_hash = list(site.content_manager.contents["content.json"]["files_optional"].values())[0]["sha512"] + sample_hash = site.content_manager.contents["content.json"]["files_optional"].values()[0]["sha512"] + + assert not site.content_manager.hashfield site.storage.verifyFiles(quick_check=True) # Find what optional files we have @@ -65,7 +69,7 @@ class TestPeer: assert site.content_manager.hashfield.getHashId(sample_hash) in site.content_manager.hashfield # Add new hash - new_hash = CryptHash.sha512sum(io.BytesIO(b"hello")) + new_hash = CryptHash.sha512sum(StringIO("hello")) assert site.content_manager.hashfield.getHashId(new_hash) not in site.content_manager.hashfield assert site.content_manager.hashfield.appendHash(new_hash) assert not site.content_manager.hashfield.appendHash(new_hash) # Don't add second time @@ -77,16 +81,15 @@ class TestPeer: def testHashfieldExchange(self, file_server, site, site_temp): server1 = file_server + server1.ip_incoming = {} # Reset flood protection server1.sites[site.address] = site - site.connection_server = server1 - - server2 = FileServer(file_server.ip, 1545) + server2 = 
FileServer("127.0.0.1", 1545) server2.sites[site_temp.address] = site_temp site_temp.connection_server = server2 site.storage.verifyFiles(quick_check=True) # Find what optional files we have # Add file_server as peer to client - server2_peer1 = site_temp.addPeer(file_server.ip, 1544) + server2_peer1 = site_temp.addPeer("127.0.0.1", 1544) # Check if hashfield has any files assert len(site.content_manager.hashfield) > 0 @@ -98,7 +101,7 @@ class TestPeer: # Test force push new hashfield site_temp.content_manager.hashfield.appendHash("AABB") - server1_peer2 = site.addPeer(file_server.ip, 1545, return_peer=True) + server1_peer2 = site.addPeer("127.0.0.1", 1545, return_peer=True) with Spy.Spy(FileRequest, "route") as requests: assert len(server1_peer2.hashfield) == 0 server2_peer1.sendMyHashfield() @@ -127,18 +130,19 @@ class TestPeer: server2.stop() def testFindHash(self, file_server, site, site_temp): + file_server.ip_incoming = {} # Reset flood protection file_server.sites[site.address] = site - client = FileServer(file_server.ip, 1545) - client.sites = {site_temp.address: site_temp} + client = FileServer("127.0.0.1", 1545) + client.sites[site_temp.address] = site_temp site_temp.connection_server = client # Add file_server as peer to client - peer_file_server = site_temp.addPeer(file_server.ip, 1544) + peer_file_server = site_temp.addPeer("127.0.0.1", 1544) assert peer_file_server.findHashIds([1234]) == {} # Add fake peer with requred hash - fake_peer_1 = site.addPeer(file_server.ip_external, 1544) + fake_peer_1 = site.addPeer("1.2.3.4", 1544) fake_peer_1.hashfield.append(1234) fake_peer_2 = site.addPeer("1.2.3.5", 1545) fake_peer_2.hashfield.append(1234) @@ -147,13 +151,7 @@ class TestPeer: fake_peer_3.hashfield.append(1235) fake_peer_3.hashfield.append(1236) - res = peer_file_server.findHashIds([1234, 1235]) - assert sorted(res[1234]) == sorted([(file_server.ip_external, 1544), ("1.2.3.5", 1545)]) - assert sorted(res[1235]) == sorted([("1.2.3.5", 1545), ("1.2.3.6", 
1546)]) - - # Test my address adding - site.content_manager.hashfield.append(1234) - - res = peer_file_server.findHashIds([1234, 1235]) - assert sorted(res[1234]) == sorted([(file_server.ip_external, 1544), ("1.2.3.5", 1545), (file_server.ip, 1544)]) - assert sorted(res[1235]) == sorted([("1.2.3.5", 1545), ("1.2.3.6", 1546)]) + assert peer_file_server.findHashIds([1234, 1235]) == { + 1234: [('1.2.3.4', 1544), ('1.2.3.5', 1545)], + 1235: [('1.2.3.5', 1545), ('1.2.3.6', 1546)] + } diff --git a/src/Test/TestRateLimit.py b/src/Test/TestRateLimit.py index fafa5f1a..a823d88b 100644 --- a/src/Test/TestRateLimit.py +++ b/src/Test/TestRateLimit.py @@ -1,6 +1,8 @@ import time import gevent +from gevent import monkey +monkey.patch_all() from util import RateLimit @@ -37,7 +39,6 @@ class TestRateLimit: assert RateLimit.call("counting", allowed_again=0.1, func=obj1.count) == "counted" assert around(time.time() - s, 0.1) # Delays second call within interval assert obj1.counted == 2 - time.sleep(0.1) # Wait the cooldown time # Call 3 times async s = time.time() @@ -51,11 +52,6 @@ class TestRateLimit: assert [thread.value for thread in threads] == ["counted", "counted", "counted"] assert around(time.time() - s, 0.2) - # Wait 0.1s cooldown - assert not RateLimit.isAllowed("counting", 0.1) - time.sleep(0.11) - assert RateLimit.isAllowed("counting", 0.1) - # No queue = instant again s = time.time() assert RateLimit.isAllowed("counting", 0.1) @@ -89,12 +85,12 @@ class TestRateLimit: assert obj1.counted == 2 assert obj1.last_called == "call #4" - # Just called, not allowed again - assert not RateLimit.isAllowed("counting async", 0.1) + # Allowed again instantly + assert RateLimit.isAllowed("counting async", 0.1) s = time.time() - t4 = RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #5").join() + RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #5").join() assert obj1.counted == 3 - assert around(time.time() - s, 
0.1) + assert around(time.time() - s, 0.0) assert not RateLimit.isAllowed("counting async", 0.1) time.sleep(0.11) assert RateLimit.isAllowed("counting async", 0.1) diff --git a/src/Test/TestSafeRe.py b/src/Test/TestSafeRe.py deleted file mode 100644 index 429bde50..00000000 --- a/src/Test/TestSafeRe.py +++ /dev/null @@ -1,24 +0,0 @@ -from util import SafeRe - -import pytest - - -class TestSafeRe: - def testSafeMatch(self): - assert SafeRe.match( - "((js|css)/(?!all.(js|css))|data/users/.*db|data/users/.*/.*|data/archived|.*.py)", - "js/ZeroTalk.coffee" - ) - assert SafeRe.match(".+/data.json", "data/users/1J3rJ8ecnwH2EPYa6MrgZttBNc61ACFiCj/data.json") - - @pytest.mark.parametrize("pattern", ["([a-zA-Z]+)*", "(a|aa)+*", "(a|a?)+", "(.*a){10}", "((?!json).)*$", r"(\w+\d+)+C"]) - def testUnsafeMatch(self, pattern): - with pytest.raises(SafeRe.UnsafePatternError) as err: - SafeRe.match(pattern, "aaaaaaaaaaaaaaaaaaaaaaaa!") - assert "Potentially unsafe" in str(err.value) - - @pytest.mark.parametrize("pattern", ["^(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)$"]) - def testUnsafeRepetition(self, pattern): - with pytest.raises(SafeRe.UnsafePatternError) as err: - SafeRe.match(pattern, "aaaaaaaaaaaaaaaaaaaaaaaa!") - assert "More than" in str(err.value) diff --git a/src/Test/TestSite.py b/src/Test/TestSite.py index 05bb2ed9..5aa14069 100644 --- a/src/Test/TestSite.py +++ b/src/Test/TestSite.py @@ -4,17 +4,16 @@ import os import pytest from Site import SiteManager -TEST_DATA_PATH = "src/Test/testdata" @pytest.mark.usefixtures("resetSettings") class TestSite: def testClone(self, site): - assert site.storage.directory == TEST_DATA_PATH + "/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" + assert site.storage.directory == "src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" # Remove old files - if os.path.isdir(TEST_DATA_PATH + "/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL"): - shutil.rmtree(TEST_DATA_PATH + 
"/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL") - assert not os.path.isfile(TEST_DATA_PATH + "/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL/content.json") + if os.path.isdir("src/Test/testdata/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL"): + shutil.rmtree("src/Test/testdata/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL") + assert not os.path.isfile("src/Test/testdata/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL/content.json") # Clone 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT to 15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc new_site = site.clone( @@ -27,18 +26,14 @@ class TestSite: assert new_site.storage.isFile("index.html") assert new_site.storage.isFile("data/users/content.json") assert new_site.storage.isFile("data/zeroblog.db") - assert new_site.storage.verifyFiles()["bad_files"] == [] # No bad files allowed + assert new_site.storage.verifyFiles() == [] # No bad files allowed assert new_site.storage.query("SELECT * FROM keyvalue WHERE key = 'title'").fetchone()["value"] == "MyZeroBlog" - # Optional files should be removed - - assert len(new_site.storage.loadJson("content.json").get("files_optional", {})) == 0 - # Test re-cloning (updating) # Changes in non-data files should be overwritten - new_site.storage.write("index.html", b"this will be overwritten") - assert new_site.storage.read("index.html") == b"this will be overwritten" + new_site.storage.write("index.html", "this will be overwritten") + assert new_site.storage.read("index.html"), "this will be overwritten" # Changes in data file should be kept after re-cloning changed_contentjson = new_site.storage.loadJson("content.json") @@ -53,7 +48,6 @@ class TestSite: assert new_site.storage.query("SELECT * FROM keyvalue WHERE key = 'title'").fetchone()["value"] == "UpdateTest" # Re-clone the site - site.log.debug("Re-cloning") site.clone("159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL") assert new_site.storage.loadJson("data/data.json")["title"] == "UpdateTest" @@ -62,7 +56,7 @@ class TestSite: # Delete created files new_site.storage.deleteFiles() - assert not os.path.isdir(TEST_DATA_PATH 
+ "/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL") + assert not os.path.isdir("src/Test/testdata/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL") # Delete from site registry assert new_site.address in SiteManager.site_manager.sites diff --git a/src/Test/TestSiteDownload.py b/src/Test/TestSiteDownload.py index cd0a4c9f..85ad9d7d 100644 --- a/src/Test/TestSiteDownload.py +++ b/src/Test/TestSiteDownload.py @@ -3,21 +3,21 @@ import time import pytest import mock import gevent -import gevent.event -import os from Connection import ConnectionServer from Config import config from File import FileRequest from File import FileServer -from Site.Site import Site -from . import Spy +from Site import Site +import Spy @pytest.mark.usefixtures("resetTempSettings") @pytest.mark.usefixtures("resetSettings") class TestSiteDownload: - def testRename(self, file_server, site, site_temp): + def testDownload(self, file_server, site, site_temp): + file_server.ip_incoming = {} # Reset flood protection + assert site.storage.directory == config.data_dir + "/" + site.address assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address @@ -26,220 +26,55 @@ class TestSiteDownload: file_server.sites[site.address] = site # Init client server - client = FileServer(file_server.ip, 1545) - client.sites = {site_temp.address: site_temp} + client = ConnectionServer("127.0.0.1", 1545) site_temp.connection_server = client site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net - - site_temp.addPeer(file_server.ip, 1544) - - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) - - assert site_temp.storage.isFile("content.json") - - # Rename non-optional file - os.rename(site.storage.getPath("data/img/domain.png"), site.storage.getPath("data/img/domain-new.png")) - - site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv") - - content = site.storage.loadJson("content.json") - assert 
"data/img/domain-new.png" in content["files"] - assert "data/img/domain.png" not in content["files"] - assert not site_temp.storage.isFile("data/img/domain-new.png") - assert site_temp.storage.isFile("data/img/domain.png") - settings_before = site_temp.settings - + site_temp.addPeer("127.0.0.1", 1544) with Spy.Spy(FileRequest, "route") as requests: - site.publish() - time.sleep(0.1) - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download - assert "streamFile" not in [req[1] for req in requests] + def boostRequest(inner_path): + # I really want these file + if inner_path == "index.html": + print "needFile" + site_temp.needFile("data/img/multiuser.png", priority=9, blocking=False) + site_temp.needFile("data/img/direct_domains.png", priority=10, blocking=False) + site_temp.onFileDone.append(boostRequest) + site_temp.download(blind_includes=True).join(timeout=5) + file_requests = [request[2]["inner_path"] for request in requests if request[0] in ("getFile", "streamFile")] + # Test priority + assert file_requests[0:2] == ["content.json", "index.html"] # Must-have files + assert file_requests[2:4] == ["data/img/direct_domains.png", "data/img/multiuser.png"] # Directly requested files + assert file_requests[4:6] == ["css/all.css", "js/all.js"] # Important assets + assert file_requests[6] == "dbschema.json" # Database map + assert "-default" in file_requests[-1] # Put default files for cloning to the end - content = site_temp.storage.loadJson("content.json") - assert "data/img/domain-new.png" in content["files"] - assert "data/img/domain.png" not in content["files"] - assert site_temp.storage.isFile("data/img/domain-new.png") - assert not site_temp.storage.isFile("data/img/domain.png") - - assert site_temp.settings["size"] == settings_before["size"] - assert site_temp.settings["size_optional"] == settings_before["size_optional"] - - assert site_temp.storage.deleteFiles() - [connection.close() for connection in 
file_server.connections] - - def testRenameOptional(self, file_server, site, site_temp): - assert site.storage.directory == config.data_dir + "/" + site.address - assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address - - # Init source server - site.connection_server = file_server - file_server.sites[site.address] = site - - # Init client server - client = FileServer(file_server.ip, 1545) - client.sites = {site_temp.address: site_temp} - site_temp.connection_server = client - site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net - - - site_temp.addPeer(file_server.ip, 1544) - - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) - - assert site_temp.settings["optional_downloaded"] == 0 - - site_temp.needFile("data/optional.txt") - - assert site_temp.settings["optional_downloaded"] > 0 - settings_before = site_temp.settings - hashfield_before = site_temp.content_manager.hashfield.tobytes() - - # Rename optional file - os.rename(site.storage.getPath("data/optional.txt"), site.storage.getPath("data/optional-new.txt")) - - site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv", remove_missing_optional=True) - - content = site.storage.loadJson("content.json") - assert "data/optional-new.txt" in content["files_optional"] - assert "data/optional.txt" not in content["files_optional"] - assert not site_temp.storage.isFile("data/optional-new.txt") - assert site_temp.storage.isFile("data/optional.txt") - - with Spy.Spy(FileRequest, "route") as requests: - site.publish() - time.sleep(0.1) - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download - assert "streamFile" not in [req[1] for req in requests] - - content = site_temp.storage.loadJson("content.json") - assert "data/optional-new.txt" in content["files_optional"] - assert "data/optional.txt" not in content["files_optional"] 
- assert site_temp.storage.isFile("data/optional-new.txt") - assert not site_temp.storage.isFile("data/optional.txt") - - assert site_temp.settings["size"] == settings_before["size"] - assert site_temp.settings["size_optional"] == settings_before["size_optional"] - assert site_temp.settings["optional_downloaded"] == settings_before["optional_downloaded"] - assert site_temp.content_manager.hashfield.tobytes() == hashfield_before - - assert site_temp.storage.deleteFiles() - [connection.close() for connection in file_server.connections] - - - def testArchivedDownload(self, file_server, site, site_temp): - # Init source server - site.connection_server = file_server - file_server.sites[site.address] = site - - # Init client server - client = FileServer(file_server.ip, 1545) - client.sites = {site_temp.address: site_temp} - site_temp.connection_server = client - - # Download normally - site_temp.addPeer(file_server.ip, 1544) - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) - bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"] + # Check files + bad_files = site_temp.storage.verifyFiles(quick_check=True) + # -1 because data/users/1J6... 
user has invalid cert + assert len(site_temp.content_manager.contents) == len(site.content_manager.contents) - 1 assert not bad_files - assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" in site_temp.content_manager.contents - assert site_temp.storage.isFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json") - assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 2 - # Add archived data - assert "archived" not in site.content_manager.contents["data/users/content.json"]["user_contents"] - assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", time.time()-1) - - site.content_manager.contents["data/users/content.json"]["user_contents"]["archived"] = {"1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q": time.time()} - site.content_manager.sign("data/users/content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv") - - date_archived = site.content_manager.contents["data/users/content.json"]["user_contents"]["archived"]["1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q"] - assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived-1) - assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived) - assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived+1) # Allow user to update archived data later - - # Push archived update - assert not "archived" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"] - site.publish() - time.sleep(0.1) - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download - - # The archived content should disappear from remote client - assert "archived" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"] - assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" not in 
site_temp.content_manager.contents - assert not site_temp.storage.isDir("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q") - assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 1 - assert len(list(site_temp.storage.query("SELECT * FROM json WHERE directory LIKE '%1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q%'"))) == 0 - - assert site_temp.storage.deleteFiles() - [connection.close() for connection in file_server.connections] - - def testArchivedBeforeDownload(self, file_server, site, site_temp): - # Init source server - site.connection_server = file_server - file_server.sites[site.address] = site - - # Init client server - client = FileServer(file_server.ip, 1545) - client.sites = {site_temp.address: site_temp} - site_temp.connection_server = client - - # Download normally - site_temp.addPeer(file_server.ip, 1544) - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) - bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"] - - assert not bad_files - assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" in site_temp.content_manager.contents - assert site_temp.storage.isFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json") - assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 2 - - # Add archived data - assert not "archived_before" in site.content_manager.contents["data/users/content.json"]["user_contents"] - assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", time.time()-1) - - content_modification_time = site.content_manager.contents["data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json"]["modified"] - site.content_manager.contents["data/users/content.json"]["user_contents"]["archived_before"] = content_modification_time - site.content_manager.sign("data/users/content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv") - - date_archived = 
site.content_manager.contents["data/users/content.json"]["user_contents"]["archived_before"] - assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived-1) - assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived) - assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived+1) # Allow user to update archived data later - - # Push archived update - assert not "archived_before" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"] - site.publish() - time.sleep(0.1) - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download - - # The archived content should disappear from remote client - assert "archived_before" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"] - assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" not in site_temp.content_manager.contents - assert not site_temp.storage.isDir("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q") - assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 1 - assert len(list(site_temp.storage.query("SELECT * FROM json WHERE directory LIKE '%1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q%'"))) == 0 - - assert site_temp.storage.deleteFiles() [connection.close() for connection in file_server.connections] # Test when connected peer has the optional file def testOptionalDownload(self, file_server, site, site_temp): + file_server.ip_incoming = {} # Reset flood protection + # Init source server site.connection_server = file_server file_server.sites[site.address] = site # Init client server - client = ConnectionServer(file_server.ip, 1545) + client = ConnectionServer("127.0.0.1", 1545) site_temp.connection_server = client site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net - 
site_temp.addPeer(file_server.ip, 1544) + site_temp.addPeer("127.0.0.1", 1544) # Download site - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) + site_temp.download(blind_includes=True).join(timeout=5) # Download optional data/optional.txt site.storage.verifyFiles(quick_check=True) # Find what optional files we have @@ -269,56 +104,38 @@ class TestSiteDownload: # Test when connected peer does not has the file, so ask him if he know someone who has it def testFindOptional(self, file_server, site, site_temp): + file_server.ip_incoming = {} # Reset flood protection + # Init source server site.connection_server = file_server file_server.sites[site.address] = site # Init full source server (has optional files) site_full = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT") - file_server_full = FileServer(file_server.ip, 1546) + file_server_full = FileServer("127.0.0.1", 1546) site_full.connection_server = file_server_full - - def listen(): - ConnectionServer.start(file_server_full) - ConnectionServer.listen(file_server_full) - - gevent.spawn(listen) - time.sleep(0.001) # Port opening + gevent.spawn(lambda: ConnectionServer.start(file_server_full)) + time.sleep(0) # Port opening file_server_full.sites[site_full.address] = site_full # Add site site_full.storage.verifyFiles(quick_check=True) # Check optional files - site_full_peer = site.addPeer(file_server.ip, 1546) # Add it to source server - hashfield = site_full_peer.updateHashfield() # Update hashfield - assert len(site_full.content_manager.hashfield) == 8 - assert hashfield - assert site_full.storage.isFile("data/optional.txt") - assert site_full.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif") - assert len(site_full_peer.hashfield) == 8 - - # Remove hashes from source server - for hash in list(site.content_manager.hashfield): - site.content_manager.hashfield.remove(hash) + site_full_peer = site.addPeer("127.0.0.1", 1546) # Add it to source 
server + assert site_full_peer.updateHashfield() # Update hashfield # Init client server - site_temp.connection_server = ConnectionServer(file_server.ip, 1545) - site_temp.addPeer(file_server.ip, 1544) # Add source server + site_temp.connection_server = ConnectionServer("127.0.0.1", 1545) + site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net + site_temp.addPeer("127.0.0.1", 1544) # Add source server # Download normal files - site_temp.log.info("Start Downloading site") - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) + site_temp.download(blind_includes=True).join(timeout=5) # Download optional data/optional.txt optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt") - optional_file_info2 = site_temp.content_manager.getFileInfo("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif") assert not site_temp.storage.isFile("data/optional.txt") - assert not site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif") assert not site.content_manager.hashfield.hasHash(optional_file_info["sha512"]) # Source server don't know he has the file - assert not site.content_manager.hashfield.hasHash(optional_file_info2["sha512"]) # Source server don't know he has the file assert site_full_peer.hashfield.hasHash(optional_file_info["sha512"]) # Source full peer on source server has the file - assert site_full_peer.hashfield.hasHash(optional_file_info2["sha512"]) # Source full peer on source server has the file assert site_full.content_manager.hashfield.hasHash(optional_file_info["sha512"]) # Source full server he has the file - assert site_full.content_manager.hashfield.hasHash(optional_file_info2["sha512"]) # Source full server he has the file - site_temp.log.info("Request optional files") with Spy.Spy(FileRequest, "route") as requests: # Request 2 file same time threads = [] @@ -326,7 +143,7 @@ class 
TestSiteDownload: threads.append(site_temp.needFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif", blocking=False)) gevent.joinall(threads) - assert len([request for request in requests if request[1] == "findHashIds"]) == 1 # findHashids should call only once + assert len([request for request in requests if request[0] == "findHashIds"]) == 1 # findHashids should call only once assert site_temp.storage.isFile("data/optional.txt") assert site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif") @@ -334,229 +151,3 @@ class TestSiteDownload: assert site_temp.storage.deleteFiles() file_server_full.stop() [connection.close() for connection in file_server.connections] - site_full.content_manager.contents.db.close("FindOptional test end") - - def testUpdate(self, file_server, site, site_temp): - assert site.storage.directory == config.data_dir + "/" + site.address - assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address - - # Init source server - site.connection_server = file_server - file_server.sites[site.address] = site - - # Init client server - client = FileServer(file_server.ip, 1545) - client.sites = {site_temp.address: site_temp} - site_temp.connection_server = client - - # Don't try to find peers from the net - site.announce = mock.MagicMock(return_value=True) - site_temp.announce = mock.MagicMock(return_value=True) - - # Connect peers - site_temp.addPeer(file_server.ip, 1544) - - # Download site from site to site_temp - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) - assert len(site_temp.bad_files) == 1 - - # Update file - data_original = site.storage.open("data/data.json").read() - data_new = data_original.replace(b'"ZeroBlog"', b'"UpdatedZeroBlog"') - assert data_original != data_new - - site.storage.open("data/data.json", "wb").write(data_new) - - assert site.storage.open("data/data.json").read() == data_new - assert 
site_temp.storage.open("data/data.json").read() == data_original - - site.log.info("Publish new data.json without patch") - # Publish without patch - with Spy.Spy(FileRequest, "route") as requests: - site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv") - site.publish() - time.sleep(0.1) - site.log.info("Downloading site") - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) - assert len([request for request in requests if request[1] in ("getFile", "streamFile")]) == 1 - - assert site_temp.storage.open("data/data.json").read() == data_new - - # Close connection to avoid update spam limit - list(site.peers.values())[0].remove() - site.addPeer(file_server.ip, 1545) - list(site_temp.peers.values())[0].ping() # Connect back - time.sleep(0.1) - - # Update with patch - data_new = data_original.replace(b'"ZeroBlog"', b'"PatchedZeroBlog"') - assert data_original != data_new - - site.storage.open("data/data.json-new", "wb").write(data_new) - - assert site.storage.open("data/data.json-new").read() == data_new - assert site_temp.storage.open("data/data.json").read() != data_new - - # Generate diff - diffs = site.content_manager.getDiffs("content.json") - assert not site.storage.isFile("data/data.json-new") # New data file removed - assert site.storage.open("data/data.json").read() == data_new # -new postfix removed - assert "data/data.json" in diffs - assert diffs["data/data.json"] == [('=', 2), ('-', 29), ('+', [b'\t"title": "PatchedZeroBlog",\n']), ('=', 31102)] - - # Publish with patch - site.log.info("Publish new data.json with patch") - with Spy.Spy(FileRequest, "route") as requests: - site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv") - - event_done = gevent.event.AsyncResult() - site.publish(diffs=diffs) - time.sleep(0.1) - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) - assert [request 
for request in requests if request[1] in ("getFile", "streamFile")] == [] - - assert site_temp.storage.open("data/data.json").read() == data_new - - assert site_temp.storage.deleteFiles() - [connection.close() for connection in file_server.connections] - - def testBigUpdate(self, file_server, site, site_temp): - # Init source server - site.connection_server = file_server - file_server.sites[site.address] = site - - # Init client server - client = FileServer(file_server.ip, 1545) - client.sites = {site_temp.address: site_temp} - site_temp.connection_server = client - - # Connect peers - site_temp.addPeer(file_server.ip, 1544) - - # Download site from site to site_temp - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) - assert list(site_temp.bad_files.keys()) == ["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"] - - # Update file - data_original = site.storage.open("data/data.json").read() - data_new = data_original.replace(b'"ZeroBlog"', b'"PatchedZeroBlog"') - assert data_original != data_new - - site.storage.open("data/data.json-new", "wb").write(data_new) - - assert site.storage.open("data/data.json-new").read() == data_new - assert site_temp.storage.open("data/data.json").read() != data_new - - # Generate diff - diffs = site.content_manager.getDiffs("content.json") - assert not site.storage.isFile("data/data.json-new") # New data file removed - assert site.storage.open("data/data.json").read() == data_new # -new postfix removed - assert "data/data.json" in diffs - - content_json = site.storage.loadJson("content.json") - content_json["description"] = "BigZeroBlog" * 1024 * 10 - site.storage.writeJson("content.json", content_json) - site.content_manager.loadContent("content.json", force=True) - - # Publish with patch - site.log.info("Publish new data.json with patch") - with Spy.Spy(FileRequest, "route") as requests: - site.content_manager.sign("content.json", 
privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv") - assert site.storage.getSize("content.json") > 10 * 1024 # Make it a big content.json - site.publish(diffs=diffs) - time.sleep(0.1) - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) - file_requests = [request for request in requests if request[1] in ("getFile", "streamFile")] - assert len(file_requests) == 1 - - assert site_temp.storage.open("data/data.json").read() == data_new - assert site_temp.storage.open("content.json").read() == site.storage.open("content.json").read() - - # Test what happened if the content.json of the site is bigger than the site limit - def testHugeContentSiteUpdate(self, file_server, site, site_temp): - # Init source server - site.connection_server = file_server - file_server.sites[site.address] = site - - # Init client server - client = FileServer(file_server.ip, 1545) - client.sites = {site_temp.address: site_temp} - site_temp.connection_server = client - - # Connect peers - site_temp.addPeer(file_server.ip, 1544) - - # Download site from site to site_temp - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) - site_temp.settings["size_limit"] = int(20 * 1024 *1024) - site_temp.saveSettings() - - # Raise limit size to 20MB on site so it can be signed - site.settings["size_limit"] = int(20 * 1024 *1024) - site.saveSettings() - - content_json = site.storage.loadJson("content.json") - content_json["description"] = "PartirUnJour" * 1024 * 1024 - site.storage.writeJson("content.json", content_json) - changed, deleted = site.content_manager.loadContent("content.json", force=True) - - # Make sure we have 2 differents content.json - assert site_temp.storage.open("content.json").read() != site.storage.open("content.json").read() - - # Generate diff - diffs = site.content_manager.getDiffs("content.json") - - # Publish with patch - site.log.info("Publish new content.json bigger than 10MB") - with 
Spy.Spy(FileRequest, "route") as requests: - site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv") - assert site.storage.getSize("content.json") > 10 * 1024 * 1024 # verify it over 10MB - time.sleep(0.1) - site.publish(diffs=diffs) - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) - - assert site_temp.storage.getSize("content.json") < site_temp.getSizeLimit() * 1024 * 1024 - assert site_temp.storage.open("content.json").read() == site.storage.open("content.json").read() - - def testUnicodeFilename(self, file_server, site, site_temp): - assert site.storage.directory == config.data_dir + "/" + site.address - assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address - - # Init source server - site.connection_server = file_server - file_server.sites[site.address] = site - - # Init client server - client = FileServer(file_server.ip, 1545) - client.sites = {site_temp.address: site_temp} - site_temp.connection_server = client - site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net - - site_temp.addPeer(file_server.ip, 1544) - - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) - - site.storage.write("data/img/ÃĄrvíztÅąrő.png", b"test") - - site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv") - - content = site.storage.loadJson("content.json") - assert "data/img/ÃĄrvíztÅąrő.png" in content["files"] - assert not site_temp.storage.isFile("data/img/ÃĄrvíztÅąrő.png") - settings_before = site_temp.settings - - with Spy.Spy(FileRequest, "route") as requests: - site.publish() - time.sleep(0.1) - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download - assert len([req[1] for req in requests if req[1] == "streamFile"]) == 1 - - content = site_temp.storage.loadJson("content.json") - assert 
"data/img/ÃĄrvíztÅąrő.png" in content["files"] - assert site_temp.storage.isFile("data/img/ÃĄrvíztÅąrő.png") - - assert site_temp.settings["size"] == settings_before["size"] - assert site_temp.settings["size_optional"] == settings_before["size_optional"] - - assert site_temp.storage.deleteFiles() - [connection.close() for connection in file_server.connections] diff --git a/src/Test/TestSiteStorage.py b/src/Test/TestSiteStorage.py index f11262bf..b4d45468 100644 --- a/src/Test/TestSiteStorage.py +++ b/src/Test/TestSiteStorage.py @@ -3,23 +3,11 @@ import pytest @pytest.mark.usefixtures("resetSettings") class TestSiteStorage: - def testWalk(self, site): - # Rootdir - walk_root = list(site.storage.walk("")) - assert "content.json" in walk_root - assert "css/all.css" in walk_root - - # Subdir - assert list(site.storage.walk("data-default")) == ["data.json", "users/content-default.json"] - def testList(self, site): # Rootdir list_root = list(site.storage.list("")) assert "content.json" in list_root - assert "css/all.css" not in list_root + assert "css/all.css" in list_root # Subdir - assert set(site.storage.list("data-default")) == set(["data.json", "users"]) - - def testDbRebuild(self, site): - assert site.storage.rebuildDb() + assert list(site.storage.list("data-default")) == ["data.json", "users/content-default.json"] diff --git a/src/Test/TestThreadPool.py b/src/Test/TestThreadPool.py deleted file mode 100644 index 5e95005e..00000000 --- a/src/Test/TestThreadPool.py +++ /dev/null @@ -1,163 +0,0 @@ -import time -import threading - -import gevent -import pytest - -from util import ThreadPool - - -class TestThreadPool: - def testExecutionOrder(self): - with ThreadPool.ThreadPool(4) as pool: - events = [] - - @pool.wrap - def blocker(): - events.append("S") - out = 0 - for i in range(10000000): - if i == 3000000: - events.append("M") - out += 1 - events.append("D") - return out - - threads = [] - for i in range(3): - threads.append(gevent.spawn(blocker)) - 
gevent.joinall(threads) - - assert events == ["S"] * 3 + ["M"] * 3 + ["D"] * 3 - - res = blocker() - assert res == 10000000 - - def testLockBlockingSameThread(self): - lock = ThreadPool.Lock() - - s = time.time() - - def unlocker(): - time.sleep(1) - lock.release() - - gevent.spawn(unlocker) - lock.acquire(True) - lock.acquire(True, timeout=2) - - unlock_taken = time.time() - s - - assert 1.0 < unlock_taken < 1.5 - - def testLockBlockingDifferentThread(self): - lock = ThreadPool.Lock() - - def locker(): - lock.acquire(True) - time.sleep(0.5) - lock.release() - - with ThreadPool.ThreadPool(10) as pool: - threads = [ - pool.spawn(locker), - pool.spawn(locker), - gevent.spawn(locker), - pool.spawn(locker) - ] - time.sleep(0.1) - - s = time.time() - - lock.acquire(True, 5.0) - - unlock_taken = time.time() - s - - assert 1.8 < unlock_taken < 2.2 - - gevent.joinall(threads) - - def testMainLoopCallerThreadId(self): - main_thread_id = threading.current_thread().ident - with ThreadPool.ThreadPool(5) as pool: - def getThreadId(*args, **kwargs): - return threading.current_thread().ident - - t = pool.spawn(getThreadId) - assert t.get() != main_thread_id - - t = pool.spawn(lambda: ThreadPool.main_loop.call(getThreadId)) - assert t.get() == main_thread_id - - def testMainLoopCallerGeventSpawn(self): - main_thread_id = threading.current_thread().ident - with ThreadPool.ThreadPool(5) as pool: - def waiter(): - time.sleep(1) - return threading.current_thread().ident - - def geventSpawner(): - event = ThreadPool.main_loop.call(gevent.spawn, waiter) - - with pytest.raises(Exception) as greenlet_err: - event.get() - assert str(greenlet_err.value) == "cannot switch to a different thread" - - waiter_thread_id = ThreadPool.main_loop.call(event.get) - return waiter_thread_id - - s = time.time() - waiter_thread_id = pool.apply(geventSpawner) - assert main_thread_id == waiter_thread_id - time_taken = time.time() - s - assert 0.9 < time_taken < 1.2 - - def testEvent(self): - with 
ThreadPool.ThreadPool(5) as pool: - event = ThreadPool.Event() - - def setter(): - time.sleep(1) - event.set("done!") - - def getter(): - return event.get() - - pool.spawn(setter) - t_gevent = gevent.spawn(getter) - t_pool = pool.spawn(getter) - s = time.time() - assert event.get() == "done!" - time_taken = time.time() - s - gevent.joinall([t_gevent, t_pool]) - - assert t_gevent.get() == "done!" - assert t_pool.get() == "done!" - - assert 0.9 < time_taken < 1.2 - - with pytest.raises(Exception) as err: - event.set("another result") - - assert "Event already has value" in str(err.value) - - def testMemoryLeak(self): - import gc - thread_objs_before = [id(obj) for obj in gc.get_objects() if "threadpool" in str(type(obj))] - - def worker(): - time.sleep(0.1) - return "ok" - - def poolTest(): - with ThreadPool.ThreadPool(5) as pool: - for i in range(20): - pool.spawn(worker) - - for i in range(5): - poolTest() - new_thread_objs = [obj for obj in gc.get_objects() if "threadpool" in str(type(obj)) and id(obj) not in thread_objs_before] - #print("New objs:", new_thread_objs, "run:", num_run) - - # Make sure no threadpool object left behind - assert not new_thread_objs diff --git a/src/Test/TestTor.py b/src/Test/TestTor.py deleted file mode 100644 index e6b82c1a..00000000 --- a/src/Test/TestTor.py +++ /dev/null @@ -1,153 +0,0 @@ -import time - -import pytest -import mock - -from File import FileServer -from Crypt import CryptTor -from Config import config - -@pytest.mark.usefixtures("resetSettings") -@pytest.mark.usefixtures("resetTempSettings") -class TestTor: - def testDownload(self, tor_manager): - for retry in range(15): - time.sleep(1) - if tor_manager.enabled and tor_manager.conn: - break - assert tor_manager.enabled - - def testManagerConnection(self, tor_manager): - assert "250-version" in tor_manager.request("GETINFO version") - - def testAddOnion(self, tor_manager): - # Add - address = tor_manager.addOnion() - assert address - assert address in 
tor_manager.privatekeys - - # Delete - assert tor_manager.delOnion(address) - assert address not in tor_manager.privatekeys - - def testSignOnion(self, tor_manager): - address = tor_manager.addOnion() - - # Sign - sign = CryptTor.sign(b"hello", tor_manager.getPrivatekey(address)) - assert len(sign) == 128 - - # Verify - publickey = CryptTor.privatekeyToPublickey(tor_manager.getPrivatekey(address)) - assert len(publickey) == 140 - assert CryptTor.verify(b"hello", publickey, sign) - assert not CryptTor.verify(b"not hello", publickey, sign) - - # Pub to address - assert CryptTor.publickeyToOnion(publickey) == address - - # Delete - tor_manager.delOnion(address) - - @pytest.mark.slow - def testConnection(self, tor_manager, file_server, site, site_temp): - file_server.tor_manager.start_onions = True - address = file_server.tor_manager.getOnion(site.address) - assert address - print("Connecting to", address) - for retry in range(5): # Wait for hidden service creation - time.sleep(10) - try: - connection = file_server.getConnection(address + ".onion", 1544) - if connection: - break - except Exception as err: - continue - assert connection.handshake - assert not connection.handshake["peer_id"] # No peer_id for Tor connections - - # Return the same connection without site specified - assert file_server.getConnection(address + ".onion", 1544) == connection - # No reuse for different site - assert file_server.getConnection(address + ".onion", 1544, site=site) != connection - assert file_server.getConnection(address + ".onion", 1544, site=site) == file_server.getConnection(address + ".onion", 1544, site=site) - site_temp.address = "1OTHERSITE" - assert file_server.getConnection(address + ".onion", 1544, site=site) != file_server.getConnection(address + ".onion", 1544, site=site_temp) - - # Only allow to query from the locked site - file_server.sites[site.address] = site - connection_locked = file_server.getConnection(address + ".onion", 1544, site=site) - assert "body" in 
connection_locked.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0}) - assert connection_locked.request("getFile", {"site": "1OTHERSITE", "inner_path": "content.json", "location": 0})["error"] == "Invalid site" - - def testPex(self, file_server, site, site_temp): - # Register site to currently running fileserver - site.connection_server = file_server - file_server.sites[site.address] = site - # Create a new file server to emulate new peer connecting to our peer - file_server_temp = FileServer(file_server.ip, 1545) - site_temp.connection_server = file_server_temp - file_server_temp.sites[site_temp.address] = site_temp - - # We will request peers from this - peer_source = site_temp.addPeer(file_server.ip, 1544) - - # Get ip4 peers from source site - site.addPeer("1.2.3.4", 1555) # Add peer to source site - assert peer_source.pex(need_num=10) == 1 - assert len(site_temp.peers) == 2 - assert "1.2.3.4:1555" in site_temp.peers - - # Get onion peers from source site - site.addPeer("bka4ht2bzxchy44r.onion", 1555) - assert "bka4ht2bzxchy44r.onion:1555" not in site_temp.peers - - # Don't add onion peers if not supported - assert "onion" not in file_server_temp.supported_ip_types - assert peer_source.pex(need_num=10) == 0 - - file_server_temp.supported_ip_types.append("onion") - assert peer_source.pex(need_num=10) == 1 - - assert "bka4ht2bzxchy44r.onion:1555" in site_temp.peers - - def testFindHash(self, tor_manager, file_server, site, site_temp): - file_server.ip_incoming = {} # Reset flood protection - file_server.sites[site.address] = site - file_server.tor_manager = tor_manager - - client = FileServer(file_server.ip, 1545) - client.sites = {site_temp.address: site_temp} - site_temp.connection_server = client - - # Add file_server as peer to client - peer_file_server = site_temp.addPeer(file_server.ip, 1544) - - assert peer_file_server.findHashIds([1234]) == {} - - # Add fake peer with requred hash - fake_peer_1 = 
site.addPeer("bka4ht2bzxchy44r.onion", 1544) - fake_peer_1.hashfield.append(1234) - fake_peer_2 = site.addPeer("1.2.3.5", 1545) - fake_peer_2.hashfield.append(1234) - fake_peer_2.hashfield.append(1235) - fake_peer_3 = site.addPeer("1.2.3.6", 1546) - fake_peer_3.hashfield.append(1235) - fake_peer_3.hashfield.append(1236) - - res = peer_file_server.findHashIds([1234, 1235]) - - assert sorted(res[1234]) == [('1.2.3.5', 1545), ("bka4ht2bzxchy44r.onion", 1544)] - assert sorted(res[1235]) == [('1.2.3.5', 1545), ('1.2.3.6', 1546)] - - # Test my address adding - site.content_manager.hashfield.append(1234) - - res = peer_file_server.findHashIds([1234, 1235]) - assert sorted(res[1234]) == [('1.2.3.5', 1545), (file_server.ip, 1544), ("bka4ht2bzxchy44r.onion", 1544)] - assert sorted(res[1235]) == [('1.2.3.5', 1545), ('1.2.3.6', 1546)] - - def testSiteOnion(self, tor_manager): - with mock.patch.object(config, "tor", "always"): - assert tor_manager.getOnion("address1") != tor_manager.getOnion("address2") - assert tor_manager.getOnion("address1") == tor_manager.getOnion("address1") diff --git a/src/Test/TestTranslate.py b/src/Test/TestTranslate.py deleted file mode 100644 index 348a65a6..00000000 --- a/src/Test/TestTranslate.py +++ /dev/null @@ -1,61 +0,0 @@ -from Translate import Translate - -class TestTranslate: - def testTranslateStrict(self): - translate = Translate() - data = """ - translated = _("original") - not_translated = "original" - """ - data_translated = translate.translateData(data, {"_(original)": "translated"}) - assert 'translated = _("translated")' in data_translated - assert 'not_translated = "original"' in data_translated - - def testTranslateStrictNamed(self): - translate = Translate() - data = """ - translated = _("original", "original named") - translated_other = _("original", "original other named") - not_translated = "original" - """ - data_translated = translate.translateData(data, {"_(original, original named)": "translated"}) - assert 'translated = 
_("translated")' in data_translated - assert 'not_translated = "original"' in data_translated - - def testTranslateUtf8(self): - translate = Translate() - data = """ - greeting = "Hi again ÃĄrvztÅąrőtÃļkÃļrfÃērÃŗgÊp!" - """ - data_translated = translate.translateData(data, {"Hi again ÃĄrvztÅąrőtÃļkÃļrfÃērÃŗgÊp!": "Üdv Ãējra ÃĄrvztÅąrőtÃļkÃļrfÃērÃŗgÊp!"}) - assert data_translated == """ - greeting = "Üdv Ãējra ÃĄrvztÅąrőtÃļkÃļrfÃērÃŗgÊp!" - """ - - def testTranslateEscape(self): - _ = Translate() - _["Hello"] = "Szia" - - # Simple escaping - data = "{_[Hello]} {username}!" - username = "Hacker" - data_translated = _(data) - assert 'Szia' in data_translated - assert '<' not in data_translated - assert data_translated == "Szia Hacker<script>alert('boom')</script>!" - - # Escaping dicts - user = {"username": "Hacker"} - data = "{_[Hello]} {user[username]}!" - data_translated = _(data) - assert 'Szia' in data_translated - assert '<' not in data_translated - assert data_translated == "Szia Hacker<script>alert('boom')</script>!" - - # Escaping lists - users = [{"username": "Hacker"}] - data = "{_[Hello]} {users[0][username]}!" - data_translated = _(data) - assert 'Szia' in data_translated - assert '<' not in data_translated - assert data_translated == "Szia Hacker<script>alert('boom')</script>!" 
diff --git a/src/Test/TestUiWebsocket.py b/src/Test/TestUiWebsocket.py deleted file mode 100644 index d2d23d03..00000000 --- a/src/Test/TestUiWebsocket.py +++ /dev/null @@ -1,11 +0,0 @@ -import sys -import pytest - -@pytest.mark.usefixtures("resetSettings") -class TestUiWebsocket: - def testPermission(self, ui_websocket): - res = ui_websocket.testAction("ping") - assert res == "pong" - - res = ui_websocket.testAction("certList") - assert "You don't have permission" in res["error"] diff --git a/src/Test/TestUpnpPunch.py b/src/Test/TestUpnpPunch.py deleted file mode 100644 index f17c77bd..00000000 --- a/src/Test/TestUpnpPunch.py +++ /dev/null @@ -1,274 +0,0 @@ -import socket -from urllib.parse import urlparse - -import pytest -import mock - -from util import UpnpPunch as upnp - - -@pytest.fixture -def mock_socket(): - mock_socket = mock.MagicMock() - mock_socket.recv = mock.MagicMock(return_value=b'Hello') - mock_socket.bind = mock.MagicMock() - mock_socket.send_to = mock.MagicMock() - - return mock_socket - - -@pytest.fixture -def url_obj(): - return urlparse('http://192.168.1.1/ctrlPoint.xml') - - -@pytest.fixture(params=['WANPPPConnection', 'WANIPConnection']) -def igd_profile(request): - return """ - urn:schemas-upnp-org:service:{}:1 - urn:upnp-org:serviceId:wanpppc:pppoa - /upnp/control/wanpppcpppoa - /upnp/event/wanpppcpppoa - /WANPPPConnection.xml -""".format(request.param) - - -@pytest.fixture -def httplib_response(): - class FakeResponse(object): - def __init__(self, status=200, body='OK'): - self.status = status - self.body = body - - def read(self): - return self.body - return FakeResponse - - -class TestUpnpPunch(object): - def test_perform_m_search(self, mock_socket): - local_ip = '127.0.0.1' - - with mock.patch('util.UpnpPunch.socket.socket', - return_value=mock_socket): - result = upnp.perform_m_search(local_ip) - assert result == 'Hello' - assert local_ip == mock_socket.bind.call_args_list[0][0][0][0] - assert ('239.255.255.250', - 1900) == 
mock_socket.sendto.call_args_list[0][0][1] - - def test_perform_m_search_socket_error(self, mock_socket): - mock_socket.recv.side_effect = socket.error('Timeout error') - - with mock.patch('util.UpnpPunch.socket.socket', - return_value=mock_socket): - with pytest.raises(upnp.UpnpError): - upnp.perform_m_search('127.0.0.1') - - def test_retrieve_location_from_ssdp(self, url_obj): - ctrl_location = url_obj.geturl() - parsed_location = urlparse(ctrl_location) - rsp = ('auth: gibberish\r\nlocation: {0}\r\n' - 'Content-Type: text/html\r\n\r\n').format(ctrl_location) - result = upnp._retrieve_location_from_ssdp(rsp) - assert result == parsed_location - - def test_retrieve_location_from_ssdp_no_header(self): - rsp = 'auth: gibberish\r\nContent-Type: application/json\r\n\r\n' - with pytest.raises(upnp.IGDError): - upnp._retrieve_location_from_ssdp(rsp) - - def test_retrieve_igd_profile(self, url_obj): - with mock.patch('urllib.request.urlopen') as mock_urlopen: - upnp._retrieve_igd_profile(url_obj) - mock_urlopen.assert_called_with(url_obj.geturl(), timeout=5) - - def test_retrieve_igd_profile_timeout(self, url_obj): - with mock.patch('urllib.request.urlopen') as mock_urlopen: - mock_urlopen.side_effect = socket.error('Timeout error') - with pytest.raises(upnp.IGDError): - upnp._retrieve_igd_profile(url_obj) - - def test_parse_igd_profile_service_type(self, igd_profile): - control_path, upnp_schema = upnp._parse_igd_profile(igd_profile) - assert control_path == '/upnp/control/wanpppcpppoa' - assert upnp_schema in ('WANPPPConnection', 'WANIPConnection',) - - def test_parse_igd_profile_no_ctrlurl(self, igd_profile): - igd_profile = igd_profile.replace('controlURL', 'nope') - with pytest.raises(upnp.IGDError): - control_path, upnp_schema = upnp._parse_igd_profile(igd_profile) - - def test_parse_igd_profile_no_schema(self, igd_profile): - igd_profile = igd_profile.replace('Connection', 'nope') - with pytest.raises(upnp.IGDError): - control_path, upnp_schema = 
upnp._parse_igd_profile(igd_profile) - - def test_create_open_message_parsable(self): - from xml.parsers.expat import ExpatError - msg, _ = upnp._create_open_message('127.0.0.1', 8888) - try: - upnp.parseString(msg) - except ExpatError as e: - pytest.fail('Incorrect XML message: {}'.format(e)) - - def test_create_open_message_contains_right_stuff(self): - settings = {'description': 'test desc', - 'protocol': 'test proto', - 'upnp_schema': 'test schema'} - msg, fn_name = upnp._create_open_message('127.0.0.1', 8888, **settings) - assert fn_name == 'AddPortMapping' - assert '127.0.0.1' in msg - assert '8888' in msg - assert settings['description'] in msg - assert settings['protocol'] in msg - assert settings['upnp_schema'] in msg - - def test_parse_for_errors_bad_rsp(self, httplib_response): - rsp = httplib_response(status=500) - with pytest.raises(upnp.IGDError) as err: - upnp._parse_for_errors(rsp) - assert 'Unable to parse' in str(err.value) - - def test_parse_for_errors_error(self, httplib_response): - soap_error = ('' - '500' - 'Bad request' - '') - rsp = httplib_response(status=500, body=soap_error) - with pytest.raises(upnp.IGDError) as err: - upnp._parse_for_errors(rsp) - assert 'SOAP request error' in str(err.value) - - def test_parse_for_errors_good_rsp(self, httplib_response): - rsp = httplib_response(status=200) - assert rsp == upnp._parse_for_errors(rsp) - - def test_send_requests_success(self): - with mock.patch( - 'util.UpnpPunch._send_soap_request') as mock_send_request: - mock_send_request.return_value = mock.MagicMock(status=200) - upnp._send_requests(['msg'], None, None, None) - - assert mock_send_request.called - - def test_send_requests_failed(self): - with mock.patch( - 'util.UpnpPunch._send_soap_request') as mock_send_request: - mock_send_request.return_value = mock.MagicMock(status=500) - with pytest.raises(upnp.UpnpError): - upnp._send_requests(['msg'], None, None, None) - - assert mock_send_request.called - - def test_collect_idg_data(self): 
- pass - - @mock.patch('util.UpnpPunch._get_local_ips') - @mock.patch('util.UpnpPunch._collect_idg_data') - @mock.patch('util.UpnpPunch._send_requests') - def test_ask_to_open_port_success(self, mock_send_requests, - mock_collect_idg, mock_local_ips): - mock_collect_idg.return_value = {'upnp_schema': 'schema-yo'} - mock_local_ips.return_value = ['192.168.0.12'] - - result = upnp.ask_to_open_port(retries=5) - - soap_msg = mock_send_requests.call_args[0][0][0][0] - - assert result is True - - assert mock_collect_idg.called - assert '192.168.0.12' in soap_msg - assert '15441' in soap_msg - assert 'schema-yo' in soap_msg - - @mock.patch('util.UpnpPunch._get_local_ips') - @mock.patch('util.UpnpPunch._collect_idg_data') - @mock.patch('util.UpnpPunch._send_requests') - def test_ask_to_open_port_failure(self, mock_send_requests, - mock_collect_idg, mock_local_ips): - mock_local_ips.return_value = ['192.168.0.12'] - mock_collect_idg.return_value = {'upnp_schema': 'schema-yo'} - mock_send_requests.side_effect = upnp.UpnpError() - - with pytest.raises(upnp.UpnpError): - upnp.ask_to_open_port() - - @mock.patch('util.UpnpPunch._collect_idg_data') - @mock.patch('util.UpnpPunch._send_requests') - def test_orchestrate_soap_request(self, mock_send_requests, - mock_collect_idg): - soap_mock = mock.MagicMock() - args = ['127.0.0.1', 31337, soap_mock, 'upnp-test', {'upnp_schema': - 'schema-yo'}] - mock_collect_idg.return_value = args[-1] - - upnp._orchestrate_soap_request(*args[:-1]) - - assert mock_collect_idg.called - soap_mock.assert_called_with( - *args[:2] + ['upnp-test', 'UDP', 'schema-yo']) - assert mock_send_requests.called - - @mock.patch('util.UpnpPunch._collect_idg_data') - @mock.patch('util.UpnpPunch._send_requests') - def test_orchestrate_soap_request_without_desc(self, mock_send_requests, - mock_collect_idg): - soap_mock = mock.MagicMock() - args = ['127.0.0.1', 31337, soap_mock, {'upnp_schema': 'schema-yo'}] - mock_collect_idg.return_value = args[-1] - - 
upnp._orchestrate_soap_request(*args[:-1]) - - assert mock_collect_idg.called - soap_mock.assert_called_with(*args[:2] + [None, 'UDP', 'schema-yo']) - assert mock_send_requests.called - - def test_create_close_message_parsable(self): - from xml.parsers.expat import ExpatError - msg, _ = upnp._create_close_message('127.0.0.1', 8888) - try: - upnp.parseString(msg) - except ExpatError as e: - pytest.fail('Incorrect XML message: {}'.format(e)) - - def test_create_close_message_contains_right_stuff(self): - settings = {'protocol': 'test proto', - 'upnp_schema': 'test schema'} - msg, fn_name = upnp._create_close_message('127.0.0.1', 8888, ** - settings) - assert fn_name == 'DeletePortMapping' - assert '8888' in msg - assert settings['protocol'] in msg - assert settings['upnp_schema'] in msg - - @mock.patch('util.UpnpPunch._get_local_ips') - @mock.patch('util.UpnpPunch._orchestrate_soap_request') - def test_communicate_with_igd_success(self, mock_orchestrate, - mock_get_local_ips): - mock_get_local_ips.return_value = ['192.168.0.12'] - upnp._communicate_with_igd() - assert mock_get_local_ips.called - assert mock_orchestrate.called - - @mock.patch('util.UpnpPunch._get_local_ips') - @mock.patch('util.UpnpPunch._orchestrate_soap_request') - def test_communicate_with_igd_succeed_despite_single_failure( - self, mock_orchestrate, mock_get_local_ips): - mock_get_local_ips.return_value = ['192.168.0.12'] - mock_orchestrate.side_effect = [upnp.UpnpError, None] - upnp._communicate_with_igd(retries=2) - assert mock_get_local_ips.called - assert mock_orchestrate.called - - @mock.patch('util.UpnpPunch._get_local_ips') - @mock.patch('util.UpnpPunch._orchestrate_soap_request') - def test_communicate_with_igd_total_failure(self, mock_orchestrate, - mock_get_local_ips): - mock_get_local_ips.return_value = ['192.168.0.12'] - mock_orchestrate.side_effect = [upnp.UpnpError, upnp.IGDError] - with pytest.raises(upnp.UpnpError): - upnp._communicate_with_igd(retries=2) - assert 
mock_get_local_ips.called - assert mock_orchestrate.called diff --git a/src/Test/TestUser.py b/src/Test/TestUser.py index e5ec5c8c..1fcdd1b7 100644 --- a/src/Test/TestUser.py +++ b/src/Test/TestUser.py @@ -7,7 +7,7 @@ from Crypt import CryptBitcoin class TestUser: def testAddress(self, user): assert user.master_address == "15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc" - address_index = 1458664252141532163166741013621928587528255888800826689784628722366466547364755811 + address_index = 1458664252141532163166741013621928587528255888800826689784628722366466547364755811L assert user.getAddressAuthIndex("15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc") == address_index # Re-generate privatekey based on address_index diff --git a/src/Test/TestWeb.py b/src/Test/TestWeb.py index 2ce66c98..e637fde4 100644 --- a/src/Test/TestWeb.py +++ b/src/Test/TestWeb.py @@ -1,10 +1,10 @@ -import urllib.request +import urllib import pytest try: from selenium.webdriver.support.ui import WebDriverWait - from selenium.webdriver.support.expected_conditions import staleness_of, title_is + from selenium.webdriver.support.expected_conditions import staleness_of from selenium.common.exceptions import NoSuchElementException except: pass @@ -18,61 +18,41 @@ class WaitForPageLoad(object): self.old_page = self.browser.find_element_by_tag_name('html') def __exit__(self, *args): - WebDriverWait(self.browser, 10).until(staleness_of(self.old_page)) + WebDriverWait(self.browser, 20).until(staleness_of(self.old_page)) -def getContextUrl(browser): - return browser.execute_script("return window.location.toString()") - - -def getUrl(url): - content = urllib.request.urlopen(url).read() - assert "server error" not in content.lower(), "Got a server error! 
" + repr(url) - return content - @pytest.mark.usefixtures("resetSettings") @pytest.mark.webtest class TestWeb: def testFileSecurity(self, site_url): - assert "Not Found" in getUrl("%s/media/sites.json" % site_url) - assert "Forbidden" in getUrl("%s/media/./sites.json" % site_url) - assert "Forbidden" in getUrl("%s/media/../config.py" % site_url) - assert "Forbidden" in getUrl("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../sites.json" % site_url) - assert "Forbidden" in getUrl("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/..//sites.json" % site_url) - assert "Forbidden" in getUrl("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../../zeronet.py" % site_url) + assert "Forbidden" in urllib.urlopen("%s/media/./sites.json" % site_url).read() + assert "Forbidden" in urllib.urlopen("%s/media/../config.py" % site_url).read() + assert "Forbidden" in urllib.urlopen("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../sites.json" % site_url).read() + assert "Forbidden" in urllib.urlopen("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/..//sites.json" % site_url).read() + assert "Forbidden" in urllib.urlopen("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../../zeronet.py" % site_url).read() + assert "Forbidden" in urllib.urlopen("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../sites.json" % site_url).read() + assert "Forbidden" in urllib.urlopen("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/..//sites.json" % site_url).read() + assert "Forbidden" in urllib.urlopen("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../../zeronet.py" % site_url).read() - assert "Not Found" in getUrl("%s/raw/sites.json" % site_url) - assert "Forbidden" in getUrl("%s/raw/./sites.json" % site_url) - assert "Forbidden" in getUrl("%s/raw/../config.py" % site_url) - assert "Forbidden" in getUrl("%s/raw/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../sites.json" % site_url) - assert "Forbidden" in getUrl("%s/raw/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/..//sites.json" % site_url) - assert "Forbidden" in 
getUrl("%s/raw/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../../zeronet.py" % site_url) - - assert "Forbidden" in getUrl("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../sites.json" % site_url) - assert "Forbidden" in getUrl("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/..//sites.json" % site_url) - assert "Forbidden" in getUrl("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../../zeronet.py" % site_url) - - assert "Forbidden" in getUrl("%s/content.db" % site_url) - assert "Forbidden" in getUrl("%s/./users.json" % site_url) - assert "Forbidden" in getUrl("%s/./key-rsa.pem" % site_url) - assert "Forbidden" in getUrl("%s/././././././././././//////sites.json" % site_url) + def testHomepage(self, browser, site_url): + browser.get("%s" % site_url) + assert browser.title == "ZeroHello - ZeroNet" def testLinkSecurity(self, browser, site_url): browser.get("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/test/security.html" % site_url) - WebDriverWait(browser, 10).until(title_is("ZeroHello - ZeroNet")) - assert getContextUrl(browser) == "%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/test/security.html" % site_url + assert browser.title == "ZeroHello - ZeroNet" + assert browser.current_url == "%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/test/security.html" % site_url # Switch to inner frame browser.switch_to.frame(browser.find_element_by_id("inner-iframe")) - assert "wrapper_nonce" in getContextUrl(browser) - assert browser.find_element_by_id("script_output").text == "Result: Works" + assert "wrapper_nonce" in browser.current_url browser.switch_to.default_content() # Clicking on links without target browser.switch_to.frame(browser.find_element_by_id("inner-iframe")) with WaitForPageLoad(browser): browser.find_element_by_id("link_to_current").click() - assert "wrapper_nonce" not in getContextUrl(browser) # The browser object back to default content + assert "wrapper_nonce" not in browser.current_url # The browser object back to default content assert "Forbidden" not in browser.page_source # Check if we have frame inside frame 
browser.switch_to.frame(browser.find_element_by_id("inner-iframe")) @@ -84,22 +64,15 @@ class TestWeb: browser.switch_to.frame(browser.find_element_by_id("inner-iframe")) with WaitForPageLoad(browser): browser.find_element_by_id("link_to_top").click() - assert "wrapper_nonce" not in getContextUrl(browser) # The browser object back to default content + assert "wrapper_nonce" not in browser.current_url # The browser object back to default content assert "Forbidden" not in browser.page_source browser.switch_to.default_content() # Try to escape from inner_frame browser.switch_to.frame(browser.find_element_by_id("inner-iframe")) - assert "wrapper_nonce" in getContextUrl(browser) # Make sure we are inside of the inner-iframe + assert "wrapper_nonce" in browser.current_url # Make sure we are inside of the inner-iframe with WaitForPageLoad(browser): browser.execute_script("window.top.location = window.location") - assert "wrapper_nonce" in getContextUrl(browser) # We try to use nonce-ed html without iframe - assert " 0.1: - line_marker = "!" 
- elif since_last > 0.02: - line_marker = "*" - elif since_last > 0.01: - line_marker = "-" - else: - line_marker = " " - - since_start = time.time() - time_start - record.since_start = "%s%.3fs" % (line_marker, since_start) - - self.time_last = time.time() - return True - -log = logging.getLogger() -fmt = logging.Formatter(fmt='%(since_start)s %(thread_marker)s %(levelname)-8s %(name)s %(message)s %(thread_title)s') -[hndl.addFilter(TimeFilter()) for hndl in log.handlers] -[hndl.setFormatter(fmt) for hndl in log.handlers] - -from Site.Site import Site -from Site import SiteManager +from Site import Site from User import UserManager from File import FileServer from Connection import ConnectionServer from Crypt import CryptConnection -from Crypt import CryptBitcoin from Ui import UiWebsocket -from Tor import TorManager -from Content import ContentDb -from util import RateLimit -from Db import Db -from Debug import Debug +import gevent +from gevent import monkey +monkey.patch_all(thread=False) -gevent.get_hub().NOT_ERROR += (Debug.Notify,) - -def cleanup(): - Db.dbCloseAll() - for dir_path in [config.data_dir, config.data_dir + "-temp"]: - if os.path.isdir(dir_path): - for file_name in os.listdir(dir_path): - ext = file_name.rsplit(".", 1)[-1] - if ext not in ["csr", "pem", "srl", "db", "json", "tmp"]: - continue - file_path = dir_path + "/" + file_name - if os.path.isfile(file_path): - os.unlink(file_path) - -atexit_register(cleanup) @pytest.fixture(scope="session") def resetSettings(request): + os.chdir(os.path.abspath(os.path.dirname(__file__) + "/../..")) # Set working dir open("%s/sites.json" % config.data_dir, "w").write("{}") - open("%s/filters.json" % config.data_dir, "w").write("{}") open("%s/users.json" % config.data_dir, "w").write(""" { "15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc": { @@ -161,6 +54,11 @@ def resetSettings(request): } """) + def cleanup(): + os.unlink("%s/sites.json" % config.data_dir) + os.unlink("%s/users.json" % config.data_dir) + 
request.addfinalizer(cleanup) + @pytest.fixture(scope="session") def resetTempSettings(request): @@ -168,7 +66,6 @@ def resetTempSettings(request): if not os.path.isdir(data_dir_temp): os.mkdir(data_dir_temp) open("%s/sites.json" % data_dir_temp, "w").write("{}") - open("%s/filters.json" % data_dir_temp, "w").write("{}") open("%s/users.json" % data_dir_temp, "w").write(""" { "15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc": { @@ -182,136 +79,61 @@ def resetTempSettings(request): def cleanup(): os.unlink("%s/sites.json" % data_dir_temp) os.unlink("%s/users.json" % data_dir_temp) - os.unlink("%s/filters.json" % data_dir_temp) request.addfinalizer(cleanup) @pytest.fixture() -def site(request): - threads_before = [obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet)] - # Reset ratelimit - RateLimit.queue_db = {} - RateLimit.called_db = {} - +def site(): site = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT") - - # Always use original data - assert "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" in site.storage.getPath("") # Make sure we dont delete everything - shutil.rmtree(site.storage.getPath(""), True) - shutil.copytree(site.storage.getPath("") + "-original", site.storage.getPath("")) - - # Add to site manager - SiteManager.site_manager.get("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT") - site.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net - - def cleanup(): - site.delete() - site.content_manager.contents.db.close("Test cleanup") - site.content_manager.contents.db.timer_check_optional.kill() - SiteManager.site_manager.sites.clear() - db_path = "%s/content.db" % config.data_dir - os.unlink(db_path) - del ContentDb.content_dbs[db_path] - gevent.killall([obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet) and obj not in threads_before]) - request.addfinalizer(cleanup) - - site.greenlet_manager.stopGreenlets() - site = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT") # Create new Site object to load content.json files - if not 
SiteManager.site_manager.sites: - SiteManager.site_manager.sites = {} - SiteManager.site_manager.sites["1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"] = site - site.settings["serving"] = True return site @pytest.fixture() def site_temp(request): - threads_before = [obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet)] with mock.patch("Config.config.data_dir", config.data_dir + "-temp"): site_temp = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT") - site_temp.settings["serving"] = True - site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net def cleanup(): - site_temp.delete() - site_temp.content_manager.contents.db.close("Test cleanup") - site_temp.content_manager.contents.db.timer_check_optional.kill() - db_path = "%s-temp/content.db" % config.data_dir - os.unlink(db_path) - del ContentDb.content_dbs[db_path] - gevent.killall([obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet) and obj not in threads_before]) + site_temp.storage.deleteFiles() request.addfinalizer(cleanup) - site_temp.log = logging.getLogger("Temp:%s" % site_temp.address_short) return site_temp @pytest.fixture(scope="session") def user(): user = UserManager.user_manager.get() - if not user: - user = UserManager.user_manager.create() user.sites = {} # Reset user data return user @pytest.fixture(scope="session") -def browser(request): +def browser(): try: from selenium import webdriver - print("Starting chromedriver...") - options = webdriver.chrome.options.Options() - options.add_argument("--headless") - options.add_argument("--window-size=1920x1080") - options.add_argument("--log-level=1") - browser = webdriver.Chrome(executable_path=CHROMEDRIVER_PATH, service_log_path=os.path.devnull, options=options) - - def quit(): - browser.quit() - request.addfinalizer(quit) - except Exception as err: - raise pytest.skip("Test requires selenium + chromedriver: %s" % err) + browser = webdriver.PhantomJS(executable_path=PHANTOMJS_PATH, 
service_log_path=os.path.devnull) + browser.set_window_size(1400, 1000) + except Exception, err: + raise pytest.skip("Test requires selenium + phantomjs: %s" % err) return browser @pytest.fixture(scope="session") def site_url(): try: - urllib.request.urlopen(SITE_URL).read() - except Exception as err: + urllib.urlopen(SITE_URL).read() + except Exception, err: raise pytest.skip("Test requires zeronet client running: %s" % err) return SITE_URL -@pytest.fixture(params=['ipv4', 'ipv6']) +@pytest.fixture(scope="session") def file_server(request): - if request.param == "ipv4": - return request.getfixturevalue("file_server4") - else: - return request.getfixturevalue("file_server6") - - -@pytest.fixture -def file_server4(request): - time.sleep(0.1) + CryptConnection.manager.loadCerts() # Load and create certs + request.addfinalizer(CryptConnection.manager.removeCerts) # Remove cert files after end file_server = FileServer("127.0.0.1", 1544) - file_server.ip_external = "1.2.3.4" # Fake external ip - - def listen(): - ConnectionServer.start(file_server) - ConnectionServer.listen(file_server) - - gevent.spawn(listen) - # Wait for port opening - for retry in range(10): - time.sleep(0.1) # Port opening - try: - conn = file_server.getConnection("127.0.0.1", 1544) - conn.close() - break - except Exception as err: - print("FileServer6 startup error", Debug.formatException(err)) + gevent.spawn(lambda: ConnectionServer.start(file_server)) + time.sleep(0) # Port opening assert file_server.running - file_server.ip_incoming = {} # Reset flood protection def stop(): file_server.stop() @@ -319,179 +141,22 @@ def file_server4(request): return file_server -@pytest.fixture -def file_server6(request): - try: - sock = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM) - sock.connect(("::1", 80, 1, 1)) - has_ipv6 = True - except OSError: - has_ipv6 = False - if not has_ipv6: - pytest.skip("Ipv6 not supported") - - - time.sleep(0.1) - file_server6 = FileServer("::1", 1544) - 
file_server6.ip_external = 'fca5:95d6:bfde:d902:8951:276e:1111:a22c' # Fake external ip - - def listen(): - ConnectionServer.start(file_server6) - ConnectionServer.listen(file_server6) - - gevent.spawn(listen) - # Wait for port opening - for retry in range(10): - time.sleep(0.1) # Port opening - try: - conn = file_server6.getConnection("::1", 1544) - conn.close() - break - except Exception as err: - print("FileServer6 startup error", Debug.formatException(err)) - assert file_server6.running - file_server6.ip_incoming = {} # Reset flood protection - - def stop(): - file_server6.stop() - request.addfinalizer(stop) - return file_server6 - - @pytest.fixture() -def ui_websocket(site, user): +def ui_websocket(site, file_server, user): class WsMock: def __init__(self): - self.result = gevent.event.AsyncResult() + self.result = None def send(self, data): - logging.debug("WsMock: Set result (data: %s) called by %s" % (data, Debug.formatStack())) - self.result.set(json.loads(data)["result"]) - - def getResult(self): - logging.debug("WsMock: Get result") - back = self.result.get() - logging.debug("WsMock: Got result (data: %s)" % back) - self.result = gevent.event.AsyncResult() - return back + self.result = json.loads(data)["result"] ws_mock = WsMock() - ui_websocket = UiWebsocket(ws_mock, site, None, user, None) + ui_websocket = UiWebsocket(ws_mock, site, file_server, user, None) def testAction(action, *args, **kwargs): - ui_websocket.handleRequest({"id": 0, "cmd": action, "params": list(args) if args else kwargs}) - return ui_websocket.ws.getResult() + func = getattr(ui_websocket, "action%s" % action) + func(0, *args, **kwargs) + return ui_websocket.ws.result ui_websocket.testAction = testAction return ui_websocket - - -@pytest.fixture(scope="session") -def tor_manager(): - try: - tor_manager = TorManager(fileserver_port=1544) - tor_manager.start() - assert tor_manager.conn is not None - tor_manager.startOnions() - except Exception as err: - raise pytest.skip("Test requires 
Tor with ControlPort: %s, %s" % (config.tor_controller, err)) - return tor_manager - - -@pytest.fixture() -def db(request): - db_path = "%s/zeronet.db" % config.data_dir - schema = { - "db_name": "TestDb", - "db_file": "%s/zeronet.db" % config.data_dir, - "maps": { - "data.json": { - "to_table": [ - "test", - {"node": "test", "table": "test_importfilter", "import_cols": ["test_id", "title"]} - ] - } - }, - "tables": { - "test": { - "cols": [ - ["test_id", "INTEGER"], - ["title", "TEXT"], - ["json_id", "INTEGER REFERENCES json (json_id)"] - ], - "indexes": ["CREATE UNIQUE INDEX test_id ON test(test_id)"], - "schema_changed": 1426195822 - }, - "test_importfilter": { - "cols": [ - ["test_id", "INTEGER"], - ["title", "TEXT"], - ["json_id", "INTEGER REFERENCES json (json_id)"] - ], - "indexes": ["CREATE UNIQUE INDEX test_importfilter_id ON test_importfilter(test_id)"], - "schema_changed": 1426195822 - } - } - } - - if os.path.isfile(db_path): - os.unlink(db_path) - db = Db.Db(schema, db_path) - db.checkTables() - - def stop(): - db.close("Test db cleanup") - os.unlink(db_path) - - request.addfinalizer(stop) - return db - - -@pytest.fixture(params=["sslcrypto", "sslcrypto_fallback", "libsecp256k1"]) -def crypt_bitcoin_lib(request, monkeypatch): - monkeypatch.setattr(CryptBitcoin, "lib_verify_best", request.param) - CryptBitcoin.loadLib(request.param) - return CryptBitcoin - -@pytest.fixture(scope='function', autouse=True) -def logCaseStart(request): - global time_start - time_start = time.time() - logging.debug("---- Start test case: %s ----" % request._pyfuncitem) - yield None # Wait until all test done - - -# Workaround for pytest bug when logging in atexit/post-fixture handlers (I/O operation on closed file) -def workaroundPytestLogError(): - import _pytest.capture - write_original = _pytest.capture.EncodedFile.write - - def write_patched(obj, *args, **kwargs): - try: - write_original(obj, *args, **kwargs) - except ValueError as err: - if str(err) == "I/O operation on 
closed file": - pass - else: - raise err - - def flush_patched(obj, *args, **kwargs): - try: - obj.buffer.flush(*args, **kwargs) - except ValueError as err: - if str(err).startswith("I/O operation on closed file"): - pass - else: - raise err - - _pytest.capture.EncodedFile.write = write_patched - _pytest.capture.EncodedFile.flush = flush_patched - - -workaroundPytestLogError() - -@pytest.fixture(scope='session', autouse=True) -def disableLog(): - yield None # Wait until all test done - logging.getLogger('').setLevel(logging.getLevelName(logging.CRITICAL)) - diff --git a/src/Test/pytest.ini b/src/Test/pytest.ini index 0ffb385f..d09210d1 100644 --- a/src/Test/pytest.ini +++ b/src/Test/pytest.ini @@ -1,6 +1,5 @@ [pytest] python_files = Test*.py -addopts = -rsxX -v --durations=6 --capture=fd +addopts = -rsxX -v --durations=6 markers = - slow: mark a tests as slow. - webtest: mark a test as a webtest. + webtest: mark a test as a webtest. \ No newline at end of file diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/test_include/content.json b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/test_include/content.json deleted file mode 100644 index 814afdbf..00000000 --- a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/test_include/content.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "address": "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT", - "files": { - "data.json": { - "sha512": "369d4e780cc80504285f13774ca327fe725eed2d813aad229e62356b07365906", - "size": 505 - } - }, - "inner_path": "data/test_include/content.json", - "modified": 1470340816.513, - "signs": { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": "GxF2ZD0DaMx+CuxafnnRx+IkWTrXubcmTHaJIPyemFpzCvbSo6DyjstN8T3qngFhYIZI/MkcG4ogStG0PLv6p3w=" - } -} \ No newline at end of file diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json 
b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json deleted file mode 100644 index 67aaf584..00000000 --- a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "address": "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT", - "cert_auth_type": "web", - "cert_sign": "HBsTrjTmv+zD1iY93tSci8n9DqdEtYwzxJmRppn4/b+RYktcANGm5tXPOb+Duw3AJcgWDcGUvQVgN1D9QAwIlCw=", - "cert_user_id": "toruser@zeroid.bit", - "files": { - "data.json": { - "sha512": "4868b5e6d70a55d137db71c2e276bda80437e0235ac670962acc238071296b45", - "size": 168 - } - }, - "files_optional": { - "peanut-butter-jelly-time.gif": { - "sha512": "a238fd27bda2a06f07f9f246954b34dcf82e6472aebdecc2c5dc1f01a50721ef", - "size": 1606 - } - }, - "inner_path": "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json", - "modified": 1470340817.676, - "optional": ".*\\.(jpg|png|gif)", - "signs": { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": "G6UOG3ne1hVe3mDGXHnWX8A1vKzH0XHD6LGMsshvNFVXGn003IFNLUL9dlb3XXJf3tyJGZncvGobzNpwBib08QY=" - } -} \ No newline at end of file diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json deleted file mode 100644 index 7436b6da..00000000 --- a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "address": "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT", - "cert_auth_type": "web", - "cert_sign": "HBsTrjTmv+zD1iY93tSci8n9DqdEtYwzxJmRppn4/b+RYktcANGm5tXPOb+Duw3AJcgWDcGUvQVgN1D9QAwIlCw=", - "cert_user_id": "toruser@zeroid.bit", - "files": { - "data.json": { - "sha512": "4868b5e6d70a55d137db71c2e276bda80437e0235ac670962acc238071296b45", - "size": 168 - } - }, - 
"inner_path": "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - "modified": 1470340818.389, - "signs": { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": "G6oCzql6KWKAq2aSmZ1pm4SqvwL3e3LRdWxsvILrDc6VWpGZmVgbNn5qW18bA7fewhtA/oKc5+yYjGlTLLOWrB4=" - } -} \ No newline at end of file diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/users/content.json b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/users/content.json deleted file mode 100644 index 8c71b84a..00000000 --- a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/users/content.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "address": "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT", - "files": {}, - "ignore": ".*", - "inner_path": "data/users/content.json", - "modified": 1470340815.228, - "signs": { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": "G25hsrlyTOy8PHKuovKDRC7puoBj/OLIZ3U4OJ01izkhE1BBQ+TOgxX96+HXoZGme2/P4IdEnYjc1rqIZ6O+nFk=" - }, - "user_contents": { - "cert_signers": { - "zeroid.bit": [ "1iD5ZQJMNXu43w1qLB8sfdHVKppVMduGz" ] - }, - "permission_rules": { - ".*": { - "files_allowed": "data.json", - "files_allowed_optional": ".*\\.(png|jpg|gif)", - "max_size": 10000, - "max_size_optional": 10000000, - "signers": [ "14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet" ] - }, - "bitid/.*@zeroid.bit": { "max_size": 40000 }, - "bitmsg/.*@zeroid.bit": { "max_size": 15000 } - }, - "permissions": { - "bad@zeroid.bit": false, - "nofish@zeroid.bit": { "max_size": 100000 } - } - } -} \ No newline at end of file diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/content.json b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/content.json similarity index 63% rename from src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/content.json rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/content.json index 786db098..9b3dbab8 100644 --- a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/content.json +++ 
b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/content.json @@ -1,133 +1,136 @@ { - "address": "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT", - "background-color": "white", - "description": "Blogging platform Demo", - "domain": "Blog.ZeroNetwork.bit", + "address": "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT", + "background-color": "white", + "description": "Blogging platform Demo", + "domain": "Blog.ZeroNetwork.bit", "files": { "css/all.css": { - "sha512": "65ddd3a2071a0f48c34783aa3b1bde4424bdea344630af05a237557a62bd55dc", + "sha512": "65ddd3a2071a0f48c34783aa3b1bde4424bdea344630af05a237557a62bd55dc", "size": 112710 - }, + }, "data-default/data.json": { - "sha512": "3f5c5a220bde41b464ab116cce0bd670dd0b4ff5fe4a73d1dffc4719140038f2", + "sha512": "3f5c5a220bde41b464ab116cce0bd670dd0b4ff5fe4a73d1dffc4719140038f2", "size": 196 - }, + }, "data-default/users/content-default.json": { - "sha512": "0603ce08f7abb92b3840ad0cf40e95ea0b3ed3511b31524d4d70e88adba83daa", + "sha512": "0603ce08f7abb92b3840ad0cf40e95ea0b3ed3511b31524d4d70e88adba83daa", "size": 679 - }, + }, "data/data.json": { - "sha512": "0f2321c905b761a05c360a389e1de149d952b16097c4ccf8310158356e85fb52", + "sha512": "0f2321c905b761a05c360a389e1de149d952b16097c4ccf8310158356e85fb52", "size": 31126 - }, + }, "data/img/autoupdate.png": { - "sha512": "d2b4dc8e0da2861ea051c0c13490a4eccf8933d77383a5b43de447c49d816e71", + "sha512": "d2b4dc8e0da2861ea051c0c13490a4eccf8933d77383a5b43de447c49d816e71", "size": 24460 - }, + }, "data/img/direct_domains.png": { - "sha512": "5f14b30c1852735ab329b22496b1e2ea751cb04704789443ad73a70587c59719", + "sha512": "5f14b30c1852735ab329b22496b1e2ea751cb04704789443ad73a70587c59719", "size": 16185 - }, + }, "data/img/domain.png": { - "sha512": "ce87e0831f4d1e95a95d7120ca4d33f8273c6fce9f5bbedf7209396ea0b57b6a", + "sha512": "ce87e0831f4d1e95a95d7120ca4d33f8273c6fce9f5bbedf7209396ea0b57b6a", "size": 11881 - }, + }, "data/img/memory.png": { - "sha512": 
"dd56515085b4a79b5809716f76f267ec3a204be3ee0d215591a77bf0f390fa4e", + "sha512": "dd56515085b4a79b5809716f76f267ec3a204be3ee0d215591a77bf0f390fa4e", "size": 12775 - }, + }, "data/img/multiuser.png": { - "sha512": "88e3f795f9b86583640867897de6efc14e1aa42f93e848ed1645213e6cc210c6", + "sha512": "88e3f795f9b86583640867897de6efc14e1aa42f93e848ed1645213e6cc210c6", "size": 29480 - }, + }, "data/img/progressbar.png": { - "sha512": "23d592ae386ce14158cec34d32a3556771725e331c14d5a4905c59e0fe980ebf", + "sha512": "23d592ae386ce14158cec34d32a3556771725e331c14d5a4905c59e0fe980ebf", "size": 13294 - }, + }, "data/img/slides.png": { - "sha512": "1933db3b90ab93465befa1bd0843babe38173975e306286e08151be9992f767e", + "sha512": "1933db3b90ab93465befa1bd0843babe38173975e306286e08151be9992f767e", "size": 14439 - }, + }, "data/img/slots_memory.png": { - "sha512": "82a250e6da909d7f66341e5b5c443353958f86728cd3f06e988b6441e6847c29", + "sha512": "82a250e6da909d7f66341e5b5c443353958f86728cd3f06e988b6441e6847c29", "size": 9488 - }, + }, "data/img/trayicon.png": { - "sha512": "e7ae65bf280f13fb7175c1293dad7d18f1fcb186ebc9e1e33850cdaccb897b8f", + "sha512": "e7ae65bf280f13fb7175c1293dad7d18f1fcb186ebc9e1e33850cdaccb897b8f", "size": 19040 - }, + }, "dbschema.json": { - "sha512": "2e9466d8aa1f340c91203b4ddbe9b6669879616a1b8e9571058a74195937598d", - "size": 1527 - }, + "sha512": "7b756e8e475d4d6b345a24e2ae14254f5c6f4aa67391a94491a026550fe00df8", + "size": 1529 + }, "img/loading.gif": { - "sha512": "8a42b98962faea74618113166886be488c09dad10ca47fe97005edc5fb40cc00", + "sha512": "8a42b98962faea74618113166886be488c09dad10ca47fe97005edc5fb40cc00", "size": 723 - }, + }, "index.html": { - "sha512": "c4039ebfc4cb6f116cac05e803a18644ed70404474a572f0d8473f4572f05df3", + "sha512": "c4039ebfc4cb6f116cac05e803a18644ed70404474a572f0d8473f4572f05df3", "size": 4667 - }, + }, "js/all.js": { - "sha512": "034c97535f3c9b3fbebf2dcf61a38711dae762acf1a99168ae7ddc7e265f582c", + "sha512": 
"034c97535f3c9b3fbebf2dcf61a38711dae762acf1a99168ae7ddc7e265f582c", "size": 201178 } - }, + }, "files_optional": { "data/img/zeroblog-comments.png": { - "sha512": "efe4e815a260e555303e5c49e550a689d27a8361f64667bd4a91dbcccb83d2b4", + "sha512": "efe4e815a260e555303e5c49e550a689d27a8361f64667bd4a91dbcccb83d2b4", "size": 24001 - }, + }, "data/img/zeroid.png": { - "sha512": "b46d541a9e51ba2ddc8a49955b7debbc3b45fd13467d3c20ef104e9d938d052b", + "sha512": "b46d541a9e51ba2ddc8a49955b7debbc3b45fd13467d3c20ef104e9d938d052b", "size": 18875 - }, + }, "data/img/zeroname.png": { - "sha512": "bab45a1bb2087b64e4f69f756b2ffa5ad39b7fdc48c83609cdde44028a7a155d", + "sha512": "bab45a1bb2087b64e4f69f756b2ffa5ad39b7fdc48c83609cdde44028a7a155d", "size": 36031 - }, + }, "data/img/zerotalk-mark.png": { - "sha512": "a335b2fedeb8d291ca68d3091f567c180628e80f41de4331a5feb19601d078af", + "sha512": "a335b2fedeb8d291ca68d3091f567c180628e80f41de4331a5feb19601d078af", "size": 44862 - }, + }, "data/img/zerotalk-upvote.png": { - "sha512": "b1ffd7f948b4f99248dde7efe256c2efdfd997f7e876fb9734f986ef2b561732", + "sha512": "b1ffd7f948b4f99248dde7efe256c2efdfd997f7e876fb9734f986ef2b561732", "size": 41092 - }, + }, "data/img/zerotalk.png": { - "sha512": "54d10497a1ffca9a4780092fd1bd158c15f639856d654d2eb33a42f9d8e33cd8", + "sha512": "54d10497a1ffca9a4780092fd1bd158c15f639856d654d2eb33a42f9d8e33cd8", "size": 26606 - }, + }, "data/optional.txt": { - "sha512": "c6f81db0e9f8206c971c9e5826e3ba823ffbb1a3a900f8047652a8bf78ea98fd", + "sha512": "c6f81db0e9f8206c971c9e5826e3ba823ffbb1a3a900f8047652a8bf78ea98fd", "size": 6 } - }, - "ignore": "((js|css)/(?!all.(js|css))|data/.*db|data/users/.*/.*|data/test_include/.*)", + }, + "ignore": "((js|css)/(?!all.(js|css))|data/.*db|data/users/.*/.*|data/test_include/.*)", "includes": { "data/test_include/content.json": { - "added": 1424976057, - "files_allowed": "data.json", - "includes_allowed": false, - "max_size": 20000, - "signers": ["15ik6LeBWnACWfaika1xqGapRZ1zh3JpCo"], - 
"signers_required": 1, - "user_id": 47, + "added": 1424976057, + "files_allowed": "data.json", + "includes_allowed": false, + "max_size": 20000, + "signers": [ "15ik6LeBWnACWfaika1xqGapRZ1zh3JpCo" ], + "signers_required": 1, + "user_id": 47, "user_name": "test" - }, + }, "data/users/content.json": { - "signers": ["1LSxsKfC9S9TVXGGNSM3vPHjyW82jgCX5f"], + "signers": [ "1LSxsKfC9S9TVXGGNSM3vPHjyW82jgCX5f" ], "signers_required": 1 } - }, - "inner_path": "content.json", - "modified": 1503257990, - "optional": "(data/img/zero.*|data/optional.*)", - "signers_sign": "HDNmWJHM2diYln4pkdL+qYOvgE7MdwayzeG+xEUZBgp1HtOjBJS+knDEVQsBkjcOPicDG2it1r6R1eQrmogqSP0=", + }, + "modified": 1447360051.327, + "optional": "(data/img/zero.*|data/optional.txt)", + "sign": [ + 56704502697521630823845805057259088854630715424341172278564490141452525844500, + 111213707061262926968239474633357178510296590563162180659113023094835495771512 + ], + "signers_sign": "HDNmWJHM2diYln4pkdL+qYOvgE7MdwayzeG+xEUZBgp1HtOjBJS+knDEVQsBkjcOPicDG2it1r6R1eQrmogqSP0=", "signs": { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": "G4Uq365UBliQG66ygip1jNGYqW6Eh9Mm7nLguDFqAgk/Hksq/ruqMf9rXv78mgUfPBvL2+XgDKYvFDtlykPFZxk=" - }, - "signs_required": 1, - "title": "ZeroBlog", - "zeronet_version": "0.5.7" + "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": "G9Q5OVyYWlrvBX/s3AQTwchJr2ZefBwQdQEWyhkBJKsMDLZS6nt7o3hBsxm26FpKaBmpOfGpg2O1MQv6U0nUkiI=" + }, + "signs_required": 1, + "title": "ZeroBlog", + "zeronet_version": "0.3.3" } \ No newline at end of file diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/css/all.css b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/css/all.css similarity index 100% rename from src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/css/all.css rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/css/all.css diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data-default/data.json 
b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data-default/data.json similarity index 100% rename from src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data-default/data.json rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data-default/data.json diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data-default/users/content-default.json b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data-default/users/content-default.json similarity index 100% rename from src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data-default/users/content-default.json rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data-default/users/content-default.json diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/data.json b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/data.json similarity index 100% rename from src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/data.json rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/data.json diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/autoupdate.png b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/autoupdate.png similarity index 100% rename from src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/autoupdate.png rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/autoupdate.png diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/direct_domains.png b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/direct_domains.png similarity index 100% rename from src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/direct_domains.png rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/direct_domains.png diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/domain.png 
b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/domain.png similarity index 100% rename from src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/domain.png rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/domain.png diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/memory.png b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/memory.png similarity index 100% rename from src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/memory.png rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/memory.png diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/multiuser.png b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/multiuser.png similarity index 100% rename from src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/multiuser.png rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/multiuser.png diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/progressbar.png b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/progressbar.png similarity index 100% rename from src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/progressbar.png rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/progressbar.png diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/slides.png b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/slides.png similarity index 100% rename from src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/slides.png rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/slides.png diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/slots_memory.png b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/slots_memory.png similarity index 100% rename from 
src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/slots_memory.png rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/slots_memory.png diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/trayicon.png b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/trayicon.png similarity index 100% rename from src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/trayicon.png rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/trayicon.png diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/zeroblog-comments.png b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/zeroblog-comments.png similarity index 100% rename from src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/zeroblog-comments.png rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/zeroblog-comments.png diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/zeroid.png b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/zeroid.png similarity index 100% rename from src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/zeroid.png rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/zeroid.png diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/zeroname.png b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/zeroname.png similarity index 100% rename from src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/zeroname.png rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/zeroname.png diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/zerotalk-mark.png b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/zerotalk-mark.png similarity index 100% rename from src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/zerotalk-mark.png 
rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/zerotalk-mark.png diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/zerotalk-upvote.png b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/zerotalk-upvote.png similarity index 100% rename from src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/zerotalk-upvote.png rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/zerotalk-upvote.png diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/zerotalk.png b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/zerotalk.png similarity index 100% rename from src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/img/zerotalk.png rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/img/zerotalk.png diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/optional.txt b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/optional.txt similarity index 100% rename from src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/optional.txt rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/optional.txt diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/test_include/content.json b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/test_include/content.json new file mode 100644 index 00000000..b0bd92e8 --- /dev/null +++ b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/test_include/content.json @@ -0,0 +1,12 @@ +{ + "files": { + "data.json": { + "sha512": "369d4e780cc80504285f13774ca327fe725eed2d813aad229e62356b07365906", + "size": 505 + } + }, + "modified": 1443088412.024, + "signs": { + "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": "HPpRa/7ic/03aJ6vfz3zt3ezsnkDeaet85HGS3Rm9vCXWGsdOXboMynb/sZcTfPMC1bQ3zLRdUNMqmifKw/gnNg=" + } +} \ No newline at end of file diff --git 
a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/test_include/data.json b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/test_include/data.json similarity index 100% rename from src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/test_include/data.json rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/test_include/data.json diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json similarity index 100% rename from src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/data.json b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/data.json similarity index 100% rename from src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/data.json rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/data.json diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json new file mode 100644 index 00000000..cb343bb2 --- /dev/null +++ b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json @@ -0,0 +1,22 @@ +{ + "cert_auth_type": "web", + "cert_sign": "HBsTrjTmv+zD1iY93tSci8n9DqdEtYwzxJmRppn4/b+RYktcANGm5tXPOb+Duw3AJcgWDcGUvQVgN1D9QAwIlCw=", + 
"cert_user_id": "toruser@zeroid.bit", + "files": { + "data.json": { + "sha512": "4868b5e6d70a55d137db71c2e276bda80437e0235ac670962acc238071296b45", + "size": 168 + } + }, + "files_optional": { + "peanut-butter-jelly-time.gif": { + "sha512": "a238fd27bda2a06f07f9f246954b34dcf82e6472aebdecc2c5dc1f01a50721ef", + "size": 1606 + } + }, + "modified": 1447360055.489, + "optional": ".*\\.(jpg|png|gif)", + "signs": { + "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": "HKdnxBP3KnBM57lsCFtqV2C2k60zWzD9hNVfvbY7uytzvGiFhFaV6VP4qcvhCWTBUnbuVzk3xPNFOctSdUCcoPk=" + } +} \ No newline at end of file diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/data.json b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/data.json similarity index 100% rename from src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/data.json rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/data.json diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif similarity index 100% rename from src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json new file mode 100644 index 00000000..f14b89c3 --- /dev/null +++ 
b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json @@ -0,0 +1,15 @@ +{ + "cert_auth_type": "web", + "cert_sign": "HBsTrjTmv+zD1iY93tSci8n9DqdEtYwzxJmRppn4/b+RYktcANGm5tXPOb+Duw3AJcgWDcGUvQVgN1D9QAwIlCw=", + "cert_user_id": "toruser@zeroid.bit", + "files": { + "data.json": { + "sha512": "4868b5e6d70a55d137db71c2e276bda80437e0235ac670962acc238071296b45", + "size": 168 + } + }, + "modified": 1447360055.88, + "signs": { + "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": "G6HTP9PzwOM2Wee+FpXWBEHtj0MM+7qdBYvioW94ehZBjp8hUI1hQE4JGWSGm9ra+LIg5Cf6jPxLa8onQscn5z0=" + } +} \ No newline at end of file diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/data.json b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/data.json similarity index 100% rename from src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/data.json rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/data.json diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/users/content.json b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/users/content.json new file mode 100644 index 00000000..d3531e04 --- /dev/null +++ b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/users/content.json @@ -0,0 +1,28 @@ +{ + "files": {}, + "ignore": ".*", + "modified": 1447360051.802, + "signs": { + "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": "G9Io0btASe4xDPfiCMpP8TwNZ/35BTNlCQqIRpMtXtQjnNEEEJ5H4eFdDma+GMcnzT9SSxmQ07sG8fQL61r2H/o=" + }, + "user_contents": { + "cert_signers": { + "zeroid.bit": [ "1iD5ZQJMNXu43w1qLB8sfdHVKppVMduGz" ] + }, + "permission_rules": { + ".*": { + "files_allowed": "data.json", + "files_allowed_optional": ".*\\.(png|jpg|gif)", + "max_size": 10000, + "max_size_optional": 10000000, + "signers": [ 
"14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet" ] + }, + "bitid/.*@zeroid.bit": { "max_size": 40000 }, + "bitmsg/.*@zeroid.bit": { "max_size": 15000 } + }, + "permissions": { + "bad@zeroid.bit": false, + "nofish@zeroid.bit": { "max_size": 100000 } + } + } +} \ No newline at end of file diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/zeroblog.db b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/zeroblog.db new file mode 100644 index 00000000..aa415555 Binary files /dev/null and b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/zeroblog.db differ diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/dbschema.json b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/dbschema.json similarity index 90% rename from src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/dbschema.json rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/dbschema.json index 3d1cdd7a..dc196a3a 100644 --- a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/dbschema.json +++ b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/dbschema.json @@ -1,15 +1,15 @@ { - "db_name": "ZeroBlog", + "db_name": "ZeroID", "db_file": "data/zeroblog.db", "version": 2, "maps": { "users/.+/data.json": { - "to_table": [ + "to_table": [ "comment", - {"node": "comment_vote", "table": "comment_vote", "key_col": "comment_uri", "val_col": "vote"} + {"node": "comment_vote", "table": "comment_vote", "key_col": "comment_uri", "val_col": "vote"} ] }, - "users/.+/content.json": { + "users/.+/content.json": { "to_keyvalue": [ "cert_user_id" ] }, "data.json": { @@ -21,7 +21,7 @@ "tables": { "comment": { "cols": [ - ["comment_id", "INTEGER"], + ["comment_id", "INTEGER"], ["post_id", "INTEGER"], ["body", "TEXT"], ["date_added", "INTEGER"], diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/img/loading.gif b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/img/loading.gif similarity index 100% rename from 
src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/img/loading.gif rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/img/loading.gif diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/index.html b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/index.html similarity index 100% rename from src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/index.html rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/index.html diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/js/all.js b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/js/all.js similarity index 100% rename from src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/js/all.js rename to src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/js/all.js diff --git a/src/Tor/TorManager.py b/src/Tor/TorManager.py deleted file mode 100644 index 865d8fbf..00000000 --- a/src/Tor/TorManager.py +++ /dev/null @@ -1,311 +0,0 @@ -import logging -import re -import socket -import binascii -import sys -import os -import time -import random -import subprocess -import atexit - -import gevent - -from Config import config - -from lib import Ed25519 -from Crypt import CryptTor - -from Site import SiteManager -import socks -from gevent.lock import RLock -from Debug import Debug -from Plugin import PluginManager - - -@PluginManager.acceptPlugins -class TorManager(object): - def __init__(self, fileserver_ip=None, fileserver_port=None): - self.privatekeys = {} # Onion: Privatekey - self.site_onions = {} # Site address: Onion - self.tor_exe = "tools/tor/tor.exe" - self.has_meek_bridges = os.path.isfile("tools/tor/PluggableTransports/meek-client.exe") - self.tor_process = None - self.log = logging.getLogger("TorManager") - self.start_onions = None - self.conn = None - self.lock = RLock() - self.starting = True - self.connecting = True - self.status = None - self.event_started = gevent.event.AsyncResult() - - if config.tor == "disable": - 
self.enabled = False - self.start_onions = False - self.setStatus("Disabled") - else: - self.enabled = True - self.setStatus("Waiting") - - if fileserver_port: - self.fileserver_port = fileserver_port - else: - self.fileserver_port = config.fileserver_port - - self.ip, self.port = config.tor_controller.rsplit(":", 1) - self.port = int(self.port) - - self.proxy_ip, self.proxy_port = config.tor_proxy.rsplit(":", 1) - self.proxy_port = int(self.proxy_port) - - def start(self): - self.log.debug("Starting (Tor: %s)" % config.tor) - self.starting = True - try: - if not self.connect(): - raise Exception(self.status) - self.log.debug("Tor proxy port %s check ok" % config.tor_proxy) - except Exception as err: - if sys.platform.startswith("win") and os.path.isfile(self.tor_exe): - self.log.info("Starting self-bundled Tor, due to Tor proxy port %s check error: %s" % (config.tor_proxy, err)) - # Change to self-bundled Tor ports - self.port = 49051 - self.proxy_port = 49050 - if config.tor == "always": - socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", self.proxy_port) - self.enabled = True - if not self.connect(): - self.startTor() - else: - self.log.info("Disabling Tor, because error while accessing Tor proxy at port %s: %s" % (config.tor_proxy, err)) - self.enabled = False - - def setStatus(self, status): - self.status = status - if "main" in sys.modules: # import main has side-effects, breaks tests - import main - if "ui_server" in dir(main): - main.ui_server.updateWebsocket() - - def startTor(self): - if sys.platform.startswith("win"): - try: - self.log.info("Starting Tor client %s..." 
% self.tor_exe) - tor_dir = os.path.dirname(self.tor_exe) - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - cmd = r"%s -f torrc --defaults-torrc torrc-defaults --ignore-missing-torrc" % self.tor_exe - if config.tor_use_bridges: - cmd += " --UseBridges 1" - - self.tor_process = subprocess.Popen(cmd, cwd=tor_dir, close_fds=True, startupinfo=startupinfo) - for wait in range(1, 3): # Wait for startup - time.sleep(wait * 0.5) - self.enabled = True - if self.connect(): - if self.isSubprocessRunning(): - self.request("TAKEOWNERSHIP") # Shut down Tor client when controll connection closed - break - # Terminate on exit - atexit.register(self.stopTor) - except Exception as err: - self.log.error("Error starting Tor client: %s" % Debug.formatException(str(err))) - self.enabled = False - self.starting = False - self.event_started.set(False) - return False - - def isSubprocessRunning(self): - return self.tor_process and self.tor_process.pid and self.tor_process.poll() is None - - def stopTor(self): - self.log.debug("Stopping...") - try: - if self.isSubprocessRunning(): - self.request("SIGNAL SHUTDOWN") - except Exception as err: - self.log.error("Error stopping Tor: %s" % err) - - def connect(self): - if not self.enabled: - return False - self.site_onions = {} - self.privatekeys = {} - - return self.connectController() - - def connectController(self): - if "socket_noproxy" in dir(socket): # Socket proxy-patched, use non-proxy one - conn = socket.socket_noproxy(socket.AF_INET, socket.SOCK_STREAM) - else: - conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - - self.log.debug("Connecting to Tor Controller %s:%s" % (self.ip, self.port)) - self.connecting = True - try: - with self.lock: - conn.connect((self.ip, self.port)) - - # Auth cookie file - res_protocol = self.send("PROTOCOLINFO", conn) - cookie_match = re.search('COOKIEFILE="(.*?)"', res_protocol) - - if config.tor_password: - res_auth = self.send('AUTHENTICATE "%s"' % 
config.tor_password, conn) - elif cookie_match: - cookie_file = cookie_match.group(1).encode("ascii").decode("unicode_escape") - if not os.path.isfile(cookie_file) and self.tor_process: - # Workaround for tor client cookie auth file utf8 encoding bug (https://github.com/torproject/stem/issues/57) - cookie_file = os.path.dirname(self.tor_exe) + "\\data\\control_auth_cookie" - auth_hex = binascii.b2a_hex(open(cookie_file, "rb").read()) - res_auth = self.send("AUTHENTICATE %s" % auth_hex.decode("utf8"), conn) - else: - res_auth = self.send("AUTHENTICATE", conn) - - if "250 OK" not in res_auth: - raise Exception("Authenticate error %s" % res_auth) - - # Version 0.2.7.5 required because ADD_ONION support - res_version = self.send("GETINFO version", conn) - version = re.search(r'version=([0-9\.]+)', res_version).group(1) - if float(version.replace(".", "0", 2)) < 207.5: - raise Exception("Tor version >=0.2.7.5 required, found: %s" % version) - - self.setStatus("Connected (%s)" % res_auth) - self.event_started.set(True) - self.starting = False - self.connecting = False - self.conn = conn - except Exception as err: - self.conn = None - self.setStatus("Error (%s)" % str(err)) - self.log.warning("Tor controller connect error: %s" % Debug.formatException(str(err))) - self.enabled = False - return self.conn - - def disconnect(self): - if self.conn: - self.conn.close() - self.conn = None - - def startOnions(self): - if self.enabled: - self.log.debug("Start onions") - self.start_onions = True - self.getOnion("global") - - # Get new exit node ip - def resetCircuits(self): - res = self.request("SIGNAL NEWNYM") - if "250 OK" not in res: - self.setStatus("Reset circuits error (%s)" % res) - self.log.error("Tor reset circuits error: %s" % res) - - def addOnion(self): - if len(self.privatekeys) >= config.tor_hs_limit: - return random.choice([key for key in list(self.privatekeys.keys()) if key != self.site_onions.get("global")]) - - result = self.makeOnionAndKey() - if result: - 
onion_address, onion_privatekey = result - self.privatekeys[onion_address] = onion_privatekey - self.setStatus("OK (%s onions running)" % len(self.privatekeys)) - SiteManager.peer_blacklist.append((onion_address + ".onion", self.fileserver_port)) - return onion_address - else: - return False - - def makeOnionAndKey(self): - res = self.request("ADD_ONION NEW:ED25519-V3 port=%s" % self.fileserver_port) - match = re.search("ServiceID=([A-Za-z0-9]+).*PrivateKey=ED25519-V3:(.*?)[\r\n]", res, re.DOTALL) - if match: - onion_address, onion_privatekey = match.groups() - return (onion_address, onion_privatekey) - else: - self.setStatus("AddOnion error (%s)" % res) - self.log.error("Tor addOnion error: %s" % res) - return False - - def delOnion(self, address): - res = self.request("DEL_ONION %s" % address) - if "250 OK" in res: - del self.privatekeys[address] - self.setStatus("OK (%s onion running)" % len(self.privatekeys)) - return True - else: - self.setStatus("DelOnion error (%s)" % res) - self.log.error("Tor delOnion error: %s" % res) - self.disconnect() - return False - - def request(self, cmd): - with self.lock: - if not self.enabled: - return False - if not self.conn: - if not self.connect(): - return "" - return self.send(cmd) - - def send(self, cmd, conn=None): - if not conn: - conn = self.conn - self.log.debug("> %s" % cmd) - back = "" - for retry in range(2): - try: - conn.sendall(b"%s\r\n" % cmd.encode("utf8")) - while not back.endswith("250 OK\r\n"): - back += conn.recv(1024 * 64).decode("utf8") - break - except Exception as err: - self.log.error("Tor send error: %s, reconnecting..." 
% err) - if not self.connecting: - self.disconnect() - time.sleep(1) - self.connect() - back = None - if back: - self.log.debug("< %s" % back.strip()) - return back - - def getPrivatekey(self, address): - return self.privatekeys[address] - - def getPublickey(self, address): - return CryptTor.privatekeyToPublickey(self.privatekeys[address]) - - def getOnion(self, site_address): - if not self.enabled: - return None - - if config.tor == "always": # Different onion for every site - onion = self.site_onions.get(site_address) - else: # Same onion for every site - onion = self.site_onions.get("global") - site_address = "global" - - if not onion: - with self.lock: - self.site_onions[site_address] = self.addOnion() - onion = self.site_onions[site_address] - self.log.debug("Created new hidden service for %s: %s" % (site_address, onion)) - - return onion - - # Creates and returns a - # socket that has connected to the Tor Network - def createSocket(self, onion, port): - if not self.enabled: - return False - self.log.debug("Creating new Tor socket to %s:%s" % (onion, port)) - if self.starting: - self.log.debug("Waiting for startup...") - self.event_started.get() - if config.tor == "always": # Every socket is proxied by default, in this mode - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - else: - sock = socks.socksocket() - sock.set_proxy(socks.SOCKS5, self.proxy_ip, self.proxy_port) - return sock diff --git a/src/Tor/__init__.py b/src/Tor/__init__.py deleted file mode 100644 index d0fcffaf..00000000 --- a/src/Tor/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .TorManager import TorManager \ No newline at end of file diff --git a/src/Translate/Translate.py b/src/Translate/Translate.py deleted file mode 100644 index e73f9be1..00000000 --- a/src/Translate/Translate.py +++ /dev/null @@ -1,135 +0,0 @@ -import os -import json -import logging -import inspect -import re -import html -import string - -from Config import config - -translates = [] - - -class 
EscapeProxy(dict): - # Automatically escape the accessed string values - def __getitem__(self, key): - val = dict.__getitem__(self, key) - if type(val) in (str, str): - return html.escape(val) - elif type(val) is dict: - return EscapeProxy(val) - elif type(val) is list: - return EscapeProxy(enumerate(val)) # Convert lists to dict - else: - return val - - -class Translate(dict): - def __init__(self, lang_dir=None, lang=None): - if not lang_dir: - lang_dir = os.path.dirname(__file__) + "/languages/" - if not lang: - lang = config.language - self.lang = lang - self.lang_dir = lang_dir - self.setLanguage(lang) - self.formatter = string.Formatter() - - if config.debug: - # Auto reload FileRequest on change - from Debug import DebugReloader - DebugReloader.watcher.addCallback(self.load) - - translates.append(self) - - def setLanguage(self, lang): - self.lang = re.sub("[^a-z-]", "", lang) - self.lang_file = self.lang_dir + "%s.json" % lang - self.load() - - def __repr__(self): - return "" % self.lang - - def load(self): - if self.lang == "en": - data = {} - dict.__init__(self, data) - self.clear() - elif os.path.isfile(self.lang_file): - try: - data = json.load(open(self.lang_file, encoding="utf8")) - logging.debug("Loaded translate file: %s (%s entries)" % (self.lang_file, len(data))) - except Exception as err: - logging.error("Error loading translate file %s: %s" % (self.lang_file, err)) - data = {} - dict.__init__(self, data) - else: - data = {} - dict.__init__(self, data) - self.clear() - logging.debug("Translate file not exists: %s" % self.lang_file) - - def format(self, s, kwargs, nested=False): - kwargs["_"] = self - if nested: - back = self.formatter.vformat(s, [], kwargs) # PY3 TODO: Change to format_map - return self.formatter.vformat(back, [], kwargs) - else: - return self.formatter.vformat(s, [], kwargs) - - def formatLocals(self, s, nested=False): - kwargs = inspect.currentframe().f_back.f_locals - return self.format(s, kwargs, nested=nested) - - def 
__call__(self, s, kwargs=None, nested=False, escape=True): - if not kwargs: - kwargs = inspect.currentframe().f_back.f_locals - if escape: - kwargs = EscapeProxy(kwargs) - return self.format(s, kwargs, nested=nested) - - def __missing__(self, key): - return key - - def pluralize(self, value, single, multi): - if value > 1: - return self[multi].format(value) - else: - return self[single].format(value) - - def translateData(self, data, translate_table=None, mode="js"): - if not translate_table: - translate_table = self - - patterns = [] - for key, val in list(translate_table.items()): - if key.startswith("_("): # Problematic string: only match if called between _(" ") function - key = key.replace("_(", "").replace(")", "").replace(", ", '", "') - translate_table[key] = "|" + val - patterns.append(re.escape(key)) - - def replacer(match): - target = translate_table[match.group(1)] - if mode == "js": - if target and target[0] == "|": # Strict string match - if match.string[match.start() - 2] == "_": # Only if the match if called between _(" ") function - return '"' + target[1:] + '"' - else: - return '"' + match.group(1) + '"' - return '"' + target + '"' - else: - return match.group(0)[0] + target + match.group(0)[-1] - - if mode == "html": - pattern = '[">](' + "|".join(patterns) + ')["<]' - else: - pattern = '"(' + "|".join(patterns) + ')"' - data = re.sub(pattern, replacer, data) - - if mode == "html": - data = data.replace("lang={lang}", "lang=%s" % self.lang) # lang get parameter to .js file to avoid cache - - return data - -translate = Translate() diff --git a/src/Translate/__init__.py b/src/Translate/__init__.py deleted file mode 100644 index ba0ab6d4..00000000 --- a/src/Translate/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .Translate import * \ No newline at end of file diff --git a/src/Translate/languages/da.json b/src/Translate/languages/da.json deleted file mode 100644 index 8e6f0845..00000000 --- a/src/Translate/languages/da.json +++ /dev/null @@ -1,51 
+0,0 @@ -{ - "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "Tillykke, din port ({0}) er ÃĨben.
    Du er nu fuld klient pÃĨ ZeroNet!", - "Tor mode active, every connection using Onion route.": "TOR er aktiv, alle forbindelser anvender Onions.", - "Successfully started Tor onion hidden services.": "OK. Startede TOR skjult onion service.", - "Unable to start hidden services, please check your config.": "Fejl. Kunne ikke starte TOR skjult onion service. Tjek din opsÃĻtning!", - "For faster connections open {0} port on your router.": "Åben port {0} pÃĨ din router for hurtigere forbindelse.", - "Your connection is restricted. Please, open {0} port on your router": "BegrÃĻnset forbindelse. Åben venligst port {0} pÃĨ din router", - "or configure Tor to become a full member of the ZeroNet network.": "eller opsÃĻt TOR for fuld adgang til ZeroNet!", - - "Select account you want to use in this site:": "VÃĻlg bruger til brug pÃĨ denne side:", - "currently selected": "nuvÃĻrende bruger", - "Unique to site": "Unik pÃĨ siden", - - "Content signing failed": "Signering af indhold fejlede", - "Content publish queued for {0:.0f} seconds.": "Indhold i kø for offentliggørelse i {0:.0f} sekunder.", - "Content published to {0} peers.": "Indhold offentliggjort til {0} klienter.", - "No peers found, but your content is ready to access.": "Ingen klienter fundet, men dit indhold er klar til hentning.", - "Your network connection is restricted. Please, open {0} port": "Din forbindelse er begrÃĻnset. Åben venligst port {0}", - "on your router to make your site accessible for everyone.": "pÃĨ din router for at dele din side med alle.", - "Content publish failed.": "Offentliggørelse af indhold fejlede.", - "This file still in sync, if you write it now, then the previous content may be lost.": "Denne fil er endnu ikke delt fÃĻrdig. 
Tidligere indhold kan gÃĨ tabt hvis du skriver til filen nu.", - "Write content anyway": "Del indhold alligevel", - "New certificate added:": "Nyt certifikat oprettet:", - "You current certificate:": "Dit nuvÃĻrende certifikat: ", - "Change it to {auth_type}/{auth_user_name}@{domain}": "Skift certificat til {auth_type}/{auth_user_name}@{domain}", - "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Certifikat ÃĻndret til {auth_type}/{auth_user_name}@{domain}.", - "Site cloned": "Side klonet", - - "You have successfully changed the web interface's language!": "OK. Du har nu skiftet sprog pÃĨ web brugergrÃĻnsefladen!", - "Due to the browser's caching, the full transformation could take some minute.": "Pga. browser cache kan skift af sprog tage nogle minutter.", - - "Connection with UiServer Websocket was lost. Reconnecting...": "Forbindelse til UiServer Websocket blev tabt. Genopretter forbindelse...", - "Connection with UiServer Websocket recovered.": "Forbindelse til UiServer Websocket genoprettet.", - "UiServer Websocket error, please reload the page.": "UiServer Websocket fejl. 
GenindlÃĻs venligst siden (F5)!", - "   Connecting...": "   Opretter forbindelse...", - "Site size: ": "Side størrelse: ", - "MB is larger than default allowed ": "MB er større end den tilladte default ", - "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Åben side og sÃĻt max side størrelse til \" + site_info.next_size_limit + \"MB", - " files needs to be downloaded": " filer skal downloades", - " downloaded": " downloadet", - " download failed": " download fejlede", - "Peers found: ": "Klienter fundet: ", - "No peers found": "Ingen klienter fundet", - "Running out of size limit (": "Siden fylder snart for meget (", - "Set limit to \" + site_info.next_size_limit + \"MB": "Ret max side størrelse til \" + site_info.next_size_limit + \"MB", - "Site size limit changed to {0}MB": "Max side størrelse ÃĻndret til {0}MB", - " New version of this page has just released.
    Reload to see the modified content.": " Ny version af denne side er blevet offentliggjort.
    GenindlÃĻs venligst siden (F5) for at se nyt indhold!", - "This site requests permission:": "Denne side betyder om tilladdelse:", - "_(Accept)": "Tillad" - -} diff --git a/src/Translate/languages/de.json b/src/Translate/languages/de.json deleted file mode 100644 index 1cc63b74..00000000 --- a/src/Translate/languages/de.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "Gratulation, dein Port {0} ist offen.
    Du bist ein volles Mitglied des ZeroNet Netzwerks!", - "Tor mode active, every connection using Onion route.": "Tor Modus aktiv, jede Verbindung nutzt die Onion Route.", - "Successfully started Tor onion hidden services.": "Tor versteckte Dienste erfolgreich gestartet.", - "Unable to start hidden services, please check your config.": "Nicht mÃļglich versteckte Dienste zu starten.", - "For faster connections open {0} port on your router.": "FÃŧr schnellere Verbindungen, Ãļffne Port {0} auf deinem Router.", - "Your connection is restricted. Please, open {0} port on your router": "Deine Verbindung ist eingeschränkt. Bitte Ãļffne Port {0} auf deinem Router", - "or configure Tor to become a full member of the ZeroNet network.": "oder konfiguriere Tor um ein volles Mitglied des ZeroNet Netzwerks zu werden.", - - "Select account you want to use in this site:": "Wähle das Konto, das du auf dieser Seite benutzen willst:", - "currently selected": "aktuell ausgewählt", - "Unique to site": "Eindeutig zur Seite", - - "Content signing failed": "Signierung des Inhalts fehlgeschlagen", - "Content publish queued for {0:.0f} seconds.": "VerÃļffentlichung des Inhalts um {0:.0f} Sekunden verzÃļgert.", - "Content published to {0} peers.": "Inhalt zu {0} Peers verÃļffentlicht.", - "No peers found, but your content is ready to access.": "Keine Peers gefunden, aber dein Inhalt ist bereit zum Zugriff.", - "Your network connection is restricted. Please, open {0} port": "Deine Netzwerkverbindung ist beschränkt. Bitte Ãļffne Port {0}", - "on your router to make your site accessible for everyone.": "auf deinem Router um deine Seite fÃŧr Jeden zugänglich zu machen.", - "Content publish failed.": "Inhalt konnte nicht verÃļffentlicht werden.", - "This file still in sync, if you write it now, then the previous content may be lost.": "Diese Datei wird noch synchronisiert. 
Wenn jetzt geschrieben wird geht der vorherige Inhalt verloren.", - "Write content anyway": "Inhalt trotzdem schreiben", - "New certificate added:": "Neues Zertifikat hinzugefÃŧgt:", - "You current certificate:": "Dein aktuelles Zertifikat:", - "Change it to {auth_type}/{auth_user_name}@{domain}": "Ändere es zu {auth_type}/{auth_user_name}@{domain}", - "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Zertifikat geändert zu: {auth_type}/{auth_user_name}@{domain}.", - "Site cloned": "Seite geklont", - - "You have successfully changed the web interface's language!": "Du hast die Sprache des Webinterface erfolgreich geändert!", - "Due to the browser's caching, the full transformation could take some minute.": "Aufgrund des Browsercaches kann die volle Transformation Minuten dauern.", - - "Connection with UiServer Websocket was lost. Reconnecting...": "Die Verbindung mit UiServer Websocketist abgebrochen. Neu verbinden...", - "Connection with UiServer Websocket recovered.": "Die Verbindung mit UiServer Websocket wurde wiederhergestellt.", - "UiServer Websocket error, please reload the page.": "UiServer Websocket Fehler, bitte Seite neu laden.", - "   Connecting...": "   Verbinden...", - "Site size: ": "SeitengrÃļße: ", - "MB is larger than default allowed ": "MB ist grÃļßer als der erlaubte Standart", - "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Öffne Seite und setze das Limit auf \" + site_info.next_size_limit + \"MB", - " files needs to be downloaded": " Dateien mÃŧssen noch heruntergeladen werden", - " downloaded": " heruntergeladen", - " download failed": " Herunterladen fehlgeschlagen", - "Peers found: ": "Peers gefunden: ", - "No peers found": "Keine Peers gefunden", - "Running out of size limit (": "Das Speicherlimit ist bald ausgeschÃļpft (", - "Set limit to \" + site_info.next_size_limit + \"MB": "Limit auf \" + site_info.next_size_limit + \"MB ändern", - "Site size limit changed to {0}MB": "Speicherlimit fÃŧr 
diese Seite auf {0}MB geändert", - " New version of this page has just released.
    Reload to see the modified content.": " Neue version dieser Seite wurde gerade verÃļffentlicht.
    Lade die Seite neu um den geänderten Inhalt zu sehen.", - "This site requests permission:": "Diese Seite fordert rechte:", - "_(Accept)": "Genehmigen" - -} diff --git a/src/Translate/languages/es.json b/src/Translate/languages/es.json deleted file mode 100644 index 4cac077b..00000000 --- a/src/Translate/languages/es.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "ÂĄFelicidades! tu puerto {0} estÃĄ abierto.
    ÂĄEres un miembro completo de la red Zeronet!", - "Tor mode active, every connection using Onion route.": "Modo Tor activado, cada conexiÃŗn usa una ruta Onion.", - "Successfully started Tor onion hidden services.": "Tor ha iniciado satisfactoriamente la ocultaciÃŗn de los servicios onion.", - "Unable to start hidden services, please check your config.": "No se puedo iniciar los servicios ocultos, por favor comprueba tu configuraciÃŗn.", - "For faster connections open {0} port on your router.": "Para conexiones mÃĄs rÃĄpidas abre el puerto {0} en tu router.", - "Your connection is restricted. Please, open {0} port on your router": "Tu conexiÃŗn estÃĄ limitada. Por favor, abre el puerto {0} en tu router", - "or configure Tor to become a full member of the ZeroNet network.": "o configura Tor para convertirte en un miembro completo de la red ZeroNet.", - - "Select account you want to use in this site:": "Selecciona la cuenta que quieres utilizar en este sitio:", - "currently selected": "actualmente seleccionada", - "Unique to site": "Única para el sitio", - - "Content signing failed": "Firma del contenido fallida", - "Content publish queued for {0:.0f} seconds.": "PublicaciÃŗn de contenido en cola durante {0:.0f} segundos.", - "Content published to {0} peers.": "Contenido publicado para {0} pares.", - "No peers found, but your content is ready to access.": "No se ha encontrado pares, pero tu contenido estÃĄ listo para ser accedido.", - "Your network connection is restricted. Please, open {0} port": "Tu conexiÃŗn de red estÃĄ restringida. 
Por favor, abre el puerto{0}", - "on your router to make your site accessible for everyone.": "en tu router para hacer tu sitio accesible a todo el mundo.", - "Content publish failed.": "PublicaciÃŗn de contenido fallida.", - "This file still in sync, if you write it now, then the previous content may be lost.": "Este archivo estÃĄ aÃēn sincronizado, si le escribes ahora el contenido previo podría perderse.", - "Write content anyway": "Escribir el contenido de todas formas", - "New certificate added:": "Nuevo certificado aÃąadido:", - "You current certificate:": "Tu certificado actual:", - "Change it to {auth_type}/{auth_user_name}@{domain}": "Cambia esto a {auth_type}/{auth_user_name}@{domain}", - "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Certificado cambiado a: {auth_type}/{auth_user_name}@{domain}.", - "Site cloned": "Sitio clonado", - - "You have successfully changed the web interface's language!": "ÂĄHas cambiado con Êxito el idioma de la interfaz web!", - "Due to the browser's caching, the full transformation could take some minute.": "Debido a la cachÊ del navegador, la transformaciÃŗn completa podría llevar unos minutos.", - - "Connection with UiServer Websocket was lost. Reconnecting...": "Se perdiÃŗ la conexiÃŗn con UiServer Websocket. 
Reconectando...", - "Connection with UiServer Websocket recovered.": "ConexiÃŗn con UiServer Websocket recuperada.", - "UiServer Websocket error, please reload the page.": "Error de UiServer Websocket, por favor recarga la pÃĄgina.", - "   Connecting...": "   Conectando...", - "Site size: ": "TamaÃąo del sitio: ", - "MB is larger than default allowed ": "MB es mÃĄs grande de lo permitido por defecto", - "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Abre tu sitio and establece el límite de tamaÃąo a \" + site_info.next_size_limit + \"MBs", - " files needs to be downloaded": " Los archivos necesitan ser descargados", - " downloaded": " descargados", - " download failed": " descarga fallida", - "Peers found: ": "Pares encontrados: ", - "No peers found": "No se han encontrado pares", - "Running out of size limit (": "Superando el tamaÃąo límite (", - "Set limit to \" + site_info.next_size_limit + \"MB": "Establece ellímite a \" + site_info.next_size_limit + \"MB ändern", - "Site size limit changed to {0}MB": "Límite de tamaÃąo del sitio cambiado a {0}MBs", - " New version of this page has just released.
    Reload to see the modified content.": " Se ha publicado una nueva versiÃŗn de esta pÃĄgina .
    Recarga para ver el contenido modificado.", - "This site requests permission:": "Este sitio solicita permiso:", - "_(Accept)": "Conceder" - -} diff --git a/src/Translate/languages/fa.json b/src/Translate/languages/fa.json deleted file mode 100644 index e644247a..00000000 --- a/src/Translate/languages/fa.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "ØĒØ¨ØąÛŒÚŠØŒ Ø¯ØąÚ¯Ø§Ų‡ {0} Ø´Ų…Ø§ Ø¨Ø§Ø˛ Ø´Ø¯Ų‡ Ø§ØŗØĒ.
    Ø´Ų…Ø§ یڊ ØšØļ؈ ØĒŲ…Ø§Ų… Ø´Ø¨ÚŠŲ‡ ZeroNet Ų‡ØŗØĒید!", - "Tor mode active, every connection using Onion route.": "Ø­Ø§Ų„ØĒ Tor ŲØšØ§Ų„ Ø§ØŗØĒ، Ų‡Øą Ø§ØąØĒØ¨Ø§Øˇ Ø§Ø˛ Ų…ØŗÛŒØąÛŒØ§Ø¨ÛŒ ŲžÛŒØ§Ø˛ (Onion) Ø§ØŗØĒŲØ§Ø¯Ų‡ Ų…ÛŒâ€ŒÚŠŲ†Ø¯.", - "Successfully started Tor onion hidden services.": "ØŽØ¯Ų…Ø§ØĒ ŲžŲ†Ų‡Ø§Ų† ŲžÛŒØ§Ø˛ (Onion) Tor با Ų…ŲˆŲŲ‚ÛŒØĒ ØąØ§Ų‡â€ŒØ§Ų†Ø¯Ø§Ø˛ÛŒ شد.", - "Unable to start hidden services, please check your config.": "Ų‚Ø§Ø¯Øą Ø¨Ų‡ ØąØ§Ų‡â€ŒØ§Ų†Ø¯Ø§Ø˛ÛŒ ØŽØ¯Ų…Ø§ØĒ ŲžŲ†Ų‡Ø§Ų† Ų†ÛŒØŗØĒÛŒŲ…ØŒ Ų„ØˇŲØ§ ØĒŲ†Ø¸ÛŒŲ…Ø§ØĒ ØŽŲˆØ¯ ØąØ§ Ø¨ØąØąØŗÛŒ Ų†Ų…Ø§ÛŒÛŒØ¯.", - "For faster connections open {0} port on your router.": "Ø¨ØąØ§ÛŒ Ø§ØąØĒØ¨Ø§ØˇØ§ØĒ ØŗØąÛŒØšØĒØą Ø¯ØąÚ¯Ø§Ų‡ {0} ØąØ§ Ø¨Øą ØąŲˆÛŒ Ų…ØŗÛŒØąÛŒØ§Ø¨ (ØąŲˆØĒØą) ØŽŲˆØ¯ Ø¨Ø§Ø˛ Ų†Ų…Ø§ÛŒÛŒØ¯.", - "Your connection is restricted. Please, open {0} port on your router": "Ø§ØąØĒØ¨Ø§Øˇ Ø´Ų…Ø§ Ų…Ø­Ø¯ŲˆØ¯â€ŒØ´Ø¯Ų‡ Ø§ØŗØĒ. Ų„ØˇŲØ§ Ø¯ØąÚ¯Ø§Ų‡ {0} ØąØ§ Ø¯Øą Ų…ØŗÛŒØąÛŒØ§Ø¨ (ØąŲˆØĒØą) ØŽŲˆØ¯ Ø¨Ø§Ø˛ Ų†Ų…Ø§ÛŒÛŒØ¯", - "or configure Tor to become a full member of the ZeroNet network.": "یا ŲžÛŒÚŠØąØ¨Ų†Ø¯ÛŒ Tor ØąØ§ Ø§Ų†ØŦØ§Ų… Ø¯Ų‡ÛŒØ¯ ØĒا Ø¨Ų‡ یڊ ØšØļ؈ ØĒŲ…Ø§Ų… Ø´Ø¨ÚŠŲ‡ ZeroNet ØĒØ¨Ø¯ÛŒŲ„ Ø´ŲˆÛŒØ¯.", - - "Select account you want to use in this site:": "Ø­ØŗØ§Ø¨ÛŒ ØąØ§ ÚŠŲ‡ Ų…ÛŒâ€ŒØŽŲˆØ§Ų‡ÛŒØ¯ Ø¯Øą Ø§ÛŒŲ† ØŗØ§ÛŒØĒ Ø§ØŗØĒŲØ§Ø¯Ų‡ ÚŠŲ†ÛŒØ¯ØŒ Ø§Ų†ØĒ؎اب ÚŠŲ†ÛŒØ¯:", - "currently selected": "Ø¯Øą Ø­Ø§Ų„ حاØļØą Ø§Ų†ØĒØŽØ§Ø¨â€ŒØ´Ø¯Ų‡", - "Unique to site": "Ų…ØŽØĒØĩ Ø¨Ų‡ ØŗØ§ÛŒØĒ", - - "Content signing failed": "Ø§Ų…Øļای Ų…Ø­ØĒŲˆØ§ با Ø´ÚŠØŗØĒ Ų…ŲˆØ§ØŦŲ‡ شد", - "Content publish queued for {0:.0f} seconds.": "Ų…Ø­ØĒŲˆØ§ Ø¯Øą Øĩ؁ Ø§Ų†ØĒØ´Ø§Øą با {0:.0f} ØĢØ§Ų†ÛŒŲ‡ ØĒØ§ØŽÛŒØą Ų‚ØąØ§Øą Ú¯ØąŲØĒ.", - "Content published to {0} peers.": "Ų…Ø­ØĒŲˆØ§ Ø¨ØąØ§ÛŒ {0} ØĒؚداد Ų‡Ų…ØĒا Ø§Ų†ØĒØ´Ø§Øą ÛŒØ§ŲØĒ.", - "No peers found, but your content is ready to access.": "Ų‡Ų…ØĒایی ÛŒØ§ŲØĒ Ų†Ø´Ø¯ØŒ Ø§Ų…Ø§ Ų…Ø­ØĒŲˆØ§ÛŒ Ø´Ų…Ø§ ØĸŲ…Ø§Ø¯Ų‡ Ø¯ØŗØĒØąØŗÛŒ Ø§ØŗØĒ.", - "Your network connection is restricted. 
Please, open {0} port": "Ø§ØąØĒØ¨Ø§Øˇ Ø´Ø¨ÚŠŲ‡ Ø´Ų…Ø§ Ų…Ø­Ø¯ŲˆØ¯â€ŒØ´Ø¯Ų‡ Ø§ØŗØĒ. Ų„ØˇŲØ§ Ø¯ØąÚ¯Ø§Ų‡ {0} ØąØ§", - "on your router to make your site accessible for everyone.": "Ø¯Øą Ų…ØŗÛŒØąÛŒØ§Ø¨ (ØąŲˆØĒØą) ØŽŲˆØ¯ Ø¨Ø§Ø˛ ÚŠŲ†ÛŒØ¯ ØĒا ØŗØ§ÛŒØĒ ØŽŲˆØ¯ ØąØ§ Ø¨ØąØ§ÛŒ Ų‡Ų…Ų‡ Ø¯Øą Ø¯ØŗØĒØąØŗ Ų‚ØąØ§Øą Ø¯Ų‡ÛŒØ¯.", - "Content publish failed.": "Ø§Ų†ØĒØ´Ø§Øą Ų…Ø­ØĒŲˆØ§ Ų…ŲˆŲŲ‚ Ų†Ø¨ŲˆØ¯.", - "This file still in sync, if you write it now, then the previous content may be lost.": "Ø§ÛŒŲ† ŲØ§ÛŒŲ„ Ų‡Ų…Ú†Ų†Ø§Ų† Ų‡Ų…Ú¯Ø§Ų… Ø§ØŗØĒ، Ø§Ú¯Ø˛ Ø´Ų…Ø§ ØĸŲ† ØąØ§ Ø¨Ų†ŲˆÛŒØŗÛŒØ¯ØŒ Ų…Ų…ÚŠŲ† Ø§ØŗØĒ Ų…Ø­ØĒŲˆØ§ÛŒ Ų‚Ø¨Ų„ÛŒ Ø§Ø˛â€ŒØ¨ÛŒŲ† ØąŲˆØ¯.", - "Write content anyway": "Ø¯Øą Ų‡Øą ØĩŲˆØąØĒ Ų…Ø­ØĒŲˆØ§ ØąØ§ Ø¨Ų†ŲˆÛŒØŗ", - "New certificate added:": "Ú¯ŲˆØ§Ų‡ÛŒ ØŦدیدی Ø§ŲØ˛ŲˆØ¯Ų‡ شد:", - "You current certificate:": "Ú¯ŲˆØ§Ų‡ÛŒ ŲØšŲ„ÛŒ Ø´Ų…Ø§:", - "Change it to {auth_type}/{auth_user_name}@{domain}": "ØĒØēÛŒÛŒØąØ´ Ø¨Ø¯Ų‡ Ø¨Ų‡ {auth_type}/{auth_user_name}@{domain}", - "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Ú¯ŲˆØ§Ų‡ÛŒŲ†Ø§Ų…Ų‡ Ø¨Ų‡: {auth_type}/{auth_user_name}@{domain} ØĒØēÛŒÛŒØą ŲžÛŒØ¯Ø§ ÚŠØąØ¯.", - "Site cloned": "ØŗØ§ÛŒØĒ Ų‡Ų…ØŗØ§Ų†â€ŒØŗØ§Ø˛ÛŒ شد", - - "You have successfully changed the web interface's language!": "Ø´Ų…Ø§ با Ų…ŲˆŲŲ‚ÛŒØĒ Ø˛Ø¨Ø§Ų† ØąØ§Ø¨Øˇ ŲˆØ¨ ØąØ§ ØĒØēÛŒÛŒØą دادید!", - "Due to the browser's caching, the full transformation could take some minute.": "Ø¨Ų‡ Ø¯Ų„ÛŒŲ„ Ø°ØŽÛŒØąŲ‡â€ŒØŗØ§Ø˛ÛŒ Ø¯Øą Ų…ØąŲˆØąâ€ŒÚ¯ØąØŒ Ø§Ų…ÚŠØ§Ų† Ø¯Ø§ØąØ¯ ØĒØēÛŒÛŒØą Ø´ÚŠŲ„ ÚŠØ§Ų…Ų„ Ú†Ų†Ø¯ Ø¯Ų‚ÛŒŲ‚Ų‡ ØˇŲˆŲ„ بڊشد.", - - "Connection with UiServer Websocket was lost. Reconnecting...": "اØĒØĩØ§Ų„ با UiServer Websocket Ų‚ØˇØš شد. 
اØĒØĩØ§Ų„ Ø¯ŲˆØ¨Ø§ØąŲ‡...", - "Connection with UiServer Websocket recovered.": "Ø§ØąØĒØ¨Ø§Øˇ با UiServer Websocket Ø¯ŲˆØ¨Ø§ØąŲ‡ Ø¨Øąâ€ŒŲ‚ØąØ§Øą شد.", - "UiServer Websocket error, please reload the page.": "ØŽØˇØ§ÛŒ UiServer Websocket, Ų„ØˇŲØ§ ØĩŲØ­Ų‡ ØąØ§ Ø¯ŲˆØ¨Ø§ØąŲ‡ Ø¨Ø§ØąÚ¯ÛŒØąÛŒ ÚŠŲ†ÛŒØ¯.", - "   Connecting...": "   Ø¨ØąŲ‚ØąØ§ØąÛŒ Ø§ØąØĒØ¨Ø§Øˇ...", - "Site size: ": "Ø­ØŦŲ… ØŗØ§ÛŒØĒ: ", - "MB is larger than default allowed ": "MB بیشØĒØą Ø§Ø˛ ŲžÛŒØ´â€ŒŲØąØļ Ų…ØŦØ§Ø˛ Ø§ØŗØĒ ", - "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "ØŗØ§ÛŒØĒ ØąØ§ Ø¨Ø§Ø˛ ÚŠØąØ¯Ų‡ ؈ Ų…Ø­Ø¯ŲˆØ¯Ų‡ Ø­ØŦŲ… ØąØ§ Ø¨Ų‡ \" + site_info.next_size_limit + \"MB ØĒŲ†Ø¸ÛŒŲ… ÚŠŲ†", - " files needs to be downloaded": " ŲØ§ÛŒŲ„â€ŒŲ‡Ø§ÛŒÛŒ ÚŠŲ‡ Ų†ÛŒØ§Ø˛ Ø§ØŗØĒ، Ø¯Ø§Ų†Ų„ŲˆØ¯ Ø´ŲˆŲ†Ø¯", - " downloaded": " Ø¯Ø§Ų†Ų„ŲˆØ¯ شد", - " download failed": " Ø¯Ø§Ų†Ų„ŲˆØ¯ Ų…ŲˆŲŲ‚ Ų†Ø¨ŲˆØ¯", - "Peers found: ": "Ú†Ų†Ø¯ Ų‡Ų…ØĒا ÛŒØ§ŲØĒ شد: ", - "No peers found": "Ų‡Ų…ØĒایی ÛŒØ§ŲØĒ Ų†Ø´Ø¯", - "Running out of size limit (": "ØšØ¨ŲˆØą ÚŠØąØ¯Ų‡ Ø§Ø˛ Ų…Ø­Ø¯ŲˆØ¯Ų‡ Ø­ØŦŲ… (", - "Set limit to \" + site_info.next_size_limit + \"MB": "Ų…Ø­Ø¯ŲˆØ¯Ų‡ ØąØ§ Ø¨Ų‡ \" + site_info.next_size_limit + \"MB ØĒŲ†Ø¸ÛŒŲ… ÚŠŲ†", - "Site size limit changed to {0}MB": "Ų…Ø­Ø¯ŲˆØ¯Ų‡ Ø­ØŦŲ… ØŗØ§ÛŒØĒ Ø¨Ų‡ {0}MB ØĒØēÛŒÛŒØą ÚŠØąØ¯", - " New version of this page has just released.
    Reload to see the modified content.": " Ų†ØŗØŽŲ‡ ØŦدیدی Ø§Ø˛ Ø§ÛŒŲ† ØĩŲØ­Ų‡ Ų…Ų†ØĒØ´Øą Ø´Ø¯Ų‡ Ø§ØŗØĒ.
    Ø¨ØąØ§ÛŒ Ų…Ø´Ø§Ų‡Ø¯Ų‡ Ų…Ø­ØĒŲˆØ§ÛŒ ØĒØēÛŒÛŒØąâ€ŒÛŒØ§ŲØĒŲ‡ Ø¯ŲˆØ¨Ø§ØąŲ‡ Ø¨Ø§ØąÚ¯ÛŒØąÛŒ Ų†Ų…Ø§ÛŒÛŒØ¯.", - "This site requests permission:": "Ø§ÛŒŲ† ØŗØ§ÛŒØĒ Ø¯ØąØŽŲˆØ§ØŗØĒ Ų…ØŦŲˆØ˛ Ų…ÛŒâ€ŒÚŠŲ†Ø¯:", - "_(Accept)": "_(ŲžØ°ÛŒØąŲØĒŲ†)" -} diff --git a/src/Translate/languages/fr.json b/src/Translate/languages/fr.json deleted file mode 100644 index b46ef2c3..00000000 --- a/src/Translate/languages/fr.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "FÊlicitations, le port ({0}) est ouvert.
    Vous ÃĒtes maintenant membre de ZeroNet!!", - "Tor mode active, every connection using Onion route.": "Tor actif, toutes les connexions utilisent un routage Onion.", - "Successfully started Tor onion hidden services.": "Tor activÊ avec succès.", - "Unable to start hidden services, please check your config.": "Impossible d'activer Tor, veuillez vÊrifier votre configuration.", - "For faster connections open {0} port on your router.": "Pour une meilleure connectivitÊ, ouvrez le port {0} sur votre routeur.", - "Your connection is restricted. Please, open {0} port on your router": "ConnectivitÊ limitÊe. Veuillez ouvrir le port {0} sur votre routeur", - "or configure Tor to become a full member of the ZeroNet network.": "ou configurez Tor afin d'avoir accès aux pairs ZeroNet Onion.", - - "Select account you want to use in this site:": "SÊlectionnez le compte que vous voulez utiliser pour ce site:", - "currently selected": "prÊsentement sÊlectionnÊ", - "Unique to site": "Unique au site", - - "Content signing failed": "Échec à la signature du contenu", - "Content publish queued for {0:.0f} seconds.": "Publication du contenu diffÊrÊe {0:.0f} secondes.", - "Content published to {0} peers.": "Contenu publiÊ à {0} pairs.", - "No peers found, but your content is ready to access.": "Aucun pair trouvÊ, mais votre contenu est accessible.", - "Your network connection is restricted. Please, open {0} port": "ConnectivitÊ limitÊe. 
Veuillez ouvrir le port {0}", - "on your router to make your site accessible for everyone.": "sur votre routeur pour que votre site soit accessible à tous.", - "Content publish failed.": "Échec de la publication du contenu.", - "This file still in sync, if you write it now, then the previous content may be lost.": "Ce fichier n'est pas à jour, si vous le modifiez maintenant une version antÊrieure pourrait ÃĒtre perdue.", - "Write content anyway": "Enregistrer quand mÃĒme", - "New certificate added:": "Nouveau cetificat ajoutÊ :", - "You current certificate:": "Votre certificat actuel :", - "Change it to {auth_type}/{auth_user_name}@{domain}": "Changer pour {auth_type}/{auth_user_name}@{domain}", - "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Certificat changÊ pour : {auth_type}/{auth_user_name}@{domain}-ra.", - "Site cloned": "Site clonÊ", - - "You have successfully changed the web interface's language!": "Vous avez modifiÊ la langue d'affichage avec succès!", - "Due to the browser's caching, the full transformation could take some minute.": "En fonction du cache du navigateur, la modification pourrait prendre quelques minutes.", - - "Connection with UiServer Websocket was lost. Reconnecting...": "Connexion avec UiServer Websocket rompue. 
Reconnexion...", - "Connection with UiServer Websocket recovered.": "Connexion avec UiServer Websocket rÊtablie.", - "UiServer Websocket error, please reload the page.": "Erreur du UiServer Websocket, veuillez recharger la page.", - "   Connecting...": "   Connexion...", - "Site size: ": "Taille du site : ", - "MB is larger than default allowed ": "MB est plus large que la taille permise par dÊfaut ", - "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Ouvrez le site et augmentez la taille maximale à \" + site_info.next_size_limit + \"MB-ra", - " files needs to be downloaded": " fichiers doivent ÃĒtre tÊlÊchargÊs", - " downloaded": " tÊlÊchargÊs", - " download failed": " Êchec de tÊlÊchargement", - "Peers found: ": "Pairs trouvÊs: ", - "No peers found": "Aucun pair trouvÊ", - "Running out of size limit (": "Vous approchez la taille maximale (", - "Set limit to \" + site_info.next_size_limit + \"MB": "Augmentez la taille maximale à \" + site_info.next_size_limit + \"MB", - "Site size limit changed to {0}MB": "Taille maximale du site changÊe à {0}MB", - " New version of this page has just released.
    Reload to see the modified content.": " Une nouvelle version de cette page vient d'ÃĒtre publiÊe.
    Rechargez pour voir les modifications.", - "This site requests permission:": "Ce site requiert une permission :", - "_(Accept)": "Autoriser" - -} diff --git a/src/Translate/languages/hu.json b/src/Translate/languages/hu.json deleted file mode 100644 index f9487f1d..00000000 --- a/src/Translate/languages/hu.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "GratulÃĄlunk, a portod ({0}) nyitva van.
    Teljes ÊrtÊkÅą tagja vagy a hÃĄlÃŗzatnak!", - "Tor mode active, every connection using Onion route.": "Tor mÃŗd aktív, minden kapcsolat az Onion hÃĄlÃŗzaton keresztÃŧl tÃļrtÊnik.", - "Successfully started Tor onion hidden services.": "Sikeresen elindultak a Tor onion titkos szolgÃĄltatÃĄsok.", - "Unable to start hidden services, please check your config.": "Nem sikerÃŧlt elindítani a Tor onion szolgÃĄltatÃĄsokat. KÊrjÃŧk, ellenőrizd a beÃĄllítÃĄsokat!", - "For faster connections open {0} port on your router.": "A gyorsabb kapcsolatok ÊrdekÊben nyisd ki a {0} portot a routereden.", - "Your connection is restricted. Please, open {0} port on your router": "A kapcsolatod korlÃĄtozott. KÊrjÃŧk, nyisd ki a {0} portot a routereden", - "or configure Tor to become a full member of the ZeroNet network.": "vagy ÃĄllítsd be a Tor kliensed, hogy teljes ÊrtÊkÅą tagja legyÊl a hÃĄlÃŗzatnak!", - - "Select account you want to use in this site:": "VÃĄlaszd ki az oldalhoz hasznÃĄlt felhasznÃĄlÃŗnevet:", - "currently selected": "jelenleg kijelÃļlt", - "Unique to site": "Egyedi az oldalon", - - "Content signing failed": "Tartalom alÃĄÃ­rÃĄsa sikeretelen", - "Content publish queued for {0:.0f} seconds.": "Tartalom publikÃĄlÃĄsa elhalasztva {0:.0f} mÃĄsodperccel.", - "Content published to {0} peers.": "Tartalom publikÃĄlva {0} fÊl rÊszÊre.", - "No peers found, but your content is ready to access.": "Aktív csatlakozÃĄsi pont nem talÃĄlhatÃŗ, de a tartalmad kÊszen ÃĄll a kiszolgÃĄlÃĄsra.", - "Your network connection is restricted. Please, open {0} port": "A kapcsolatod korlÃĄtozott. 
KÊrjÃŧk, nyisd ki a {0} portot", - "on your router to make your site accessible for everyone.": "a routereden, hogy az oldalad mindenki szÃĄmÃĄra elÊrhető legyen.", - "Content publish failed.": "Sikertelen tartalom publikÃĄlÃĄs.", - "This file still in sync, if you write it now, then the previous content may be lost.": "Ez a fÃĄjl mÊg letÃļltÊs alatt van, ha most felÃŧlírod a korÃĄbbi tartalma elveszhet.", - "Write content anyway": "FelÃŧlírÃĄs", - "New certificate added:": "Új tanÃēsítvÃĄny hozzÃĄadva:", - "You current certificate:": "A jelenlegi tanÃēsítvÃĄnyod: ", - "Change it to {auth_type}/{auth_user_name}@{domain}": "VÃĄltoztatÃĄs {auth_type}/{auth_user_name}@{domain}-ra", - "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "A tanÃēsítvÃĄny megvÃĄltozott {auth_type}/{auth_user_name}@{domain}-ra.", - "Site cloned": "Az oldal klÃŗnozva", - - "You have successfully changed the web interface's language!": "Sikeresen ÃĄtÃĄllítottad a web felÃŧlet nyelvÊt!", - "Due to the browser's caching, the full transformation could take some minute.": "A bÃļngÊsző cache-elÊse miatt egy pÃĄr percig eltarthat a teljes ÃĄtÃĄllÃĄs.", - - "Connection with UiServer Websocket was lost. Reconnecting...": "Az UiServer Websocket kapcsolat megszakadt. 
ÚjracsatlakozÃĄs...", - "Connection with UiServer Websocket recovered.": "Az UiServer Websocket kapcsolat visszaÃĄllt.", - "UiServer Websocket error, please reload the page.": "UiServer Websocket hiba, tÃļltsd Ãējra az oldalt!", - "   Connecting...": "   CsatlakozÃĄs...", - "Site size: ": "Oldal mÊrete: ", - "MB is larger than default allowed ": "MB nagyobb, mint az engedÊlyezett ", - "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Az oldal megnyitÃĄsa Ês a korlÃĄt mÃŗdosítÃĄsa \" + site_info.next_size_limit + \"MB-ra", - " files needs to be downloaded": " fÃĄjlt kell letÃļlteni", - " downloaded": " letÃļltve", - " download failed": " letÃļltÊs sikertelen", - "Peers found: ": "TalÃĄlt csatlakozÃĄsi pontok: ", - "No peers found": "Nincs csatlakozÃĄsi pont", - "Running out of size limit (": "Az oldal hamarosan elÊri a mÊretkorlÃĄtot (", - "Set limit to \" + site_info.next_size_limit + \"MB": "A korlÃĄt mÃŗdosítÃĄsa \" + site_info.next_size_limit + \"MB-ra", - "Site size limit changed to {0}MB": "A mÊretkorlÃĄt mÃŗdosítva {0}MB-ra", - " New version of this page has just released.
    Reload to see the modified content.": "Az oldal Êpp most mÃŗdosult
    A megvÃĄltozott tartalomÊrt tÃļltsd Ãējra!", - "This site requests permission:": "Az oldal megtekintÊsÊhez szÃŧksÊges jog:", - "_(Accept)": "EngedÊlyezÊs" - -} diff --git a/src/Translate/languages/it.json b/src/Translate/languages/it.json deleted file mode 100644 index 47992328..00000000 --- a/src/Translate/languages/it.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "Congratulazioni, la tua porta {0} è aperta.
    Ora sei un membro effettivo della rete ZeroNet!", - "Tor mode active, every connection using Onion route.": "Modalità Tor attiva, ogni connessione sta usando la rete Onion.", - "Successfully started Tor onion hidden services.": "Servizi Tor onion nascosti avviati con successo.", - "Unable to start hidden services, please check your config.": "Impossibile avviare i servizi nascosti. Si prega di controllare la propria configurazione!", - "For faster connections open {0} port on your router.": "Per avere connessioni piÚ veloci aprire la porta {0} sul router.", - "Your connection is restricted. Please, open {0} port on your router": "La tua connessione è limitata. Aprire la porta {0} sul router", - "or configure Tor to become a full member of the ZeroNet network.": "o configurare Tor per diventare membro effettivo della rete ZeroNet!", - - "Select account you want to use in this site:": "Seleziona l'account che vuoi utilizzare per questo sito:", - "currently selected": "attualmente selezionato", - "Unique to site": "Unico sul sito", - - "Content signing failed": "Firma contenuti fallita", - "Content publish queued for {0:.0f} seconds.": "Pubblicazione contenuti in coda per {0:.0f} secondi.", - "Content published to {0} peers.": "Contenuti pubblicati su {0} peer.", - "No peers found, but your content is ready to access.": "Nessun peer trovato, ma i tuoi contenuti sono pronti per l'accesso.", - "Your network connection is restricted. Please, open {0} port": "La tua connessione di rete è limitata. 
Aprire la porta {0} ", - "on your router to make your site accessible for everyone.": "sul router, per rendere il sito accessibile a chiunque.", - "Content publish failed.": "Pubblicazione contenuti fallita.", - "This file still in sync, if you write it now, then the previous content may be lost.": "Questo file è ancora in sincronizzazione, se viene modificato i contenuti precedenti andranno persi.", - "Write content anyway": "Scrivere comunque i contenuti", - "New certificate added:": "Aggiunto nuovo certificato:", - "You current certificate:": "Il tuo attuale certificato:", - "Change it to {auth_type}/{auth_user_name}@{domain}": "Cambiarlo in {auth_type}/{auth_user_name}@{domain}", - "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Certificato cambianto in: {auth_type}/{auth_user_name}@{domain}.", - "Site cloned": "Sito clonato", - - "You have successfully changed the web interface's language!": "Hai cambiato con successo la lingua dell'interfaccia web!", - "Due to the browser's caching, the full transformation could take some minute.": "La trasformazione completa potrebbe richiedre alcuni minuti a causa della cache del browser.", - - "Connection with UiServer Websocket was lost. Reconnecting...": "La connessione con UiServer Websocket è andata persa. 
Riconnessione...", - "Connection with UiServer Websocket recovered.": "Connessione con UiServer Websocket recuperata.", - "UiServer Websocket error, please reload the page.": "Errore UiServer Websocket, ricaricare la pagina!", - "   Connecting...": "   Connessione...", - "Site size: ": "Dimensione del sito: ", - "MB is larger than default allowed ": "MB è piÚ grande del valore predefinito consentito ", - "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Aprire il sito e impostare la dimensione limite a \" + site_info.next_size_limit + \"MB", - " files needs to be downloaded": " i file devono essere scaricati", - " downloaded": " scaricati", - " download failed": " scaricamento fallito", - "Peers found: ": "Peer trovati: ", - "No peers found": "Nessun peer trovato", - "Running out of size limit (": "Superato il limite di spazio (", - "Set limit to \" + site_info.next_size_limit + \"MB": "Imposta il limite a \" + site_info.next_size_limit + \"MB", - "Site size limit changed to {0}MB": "Limite di spazio cambiato a {0}MB", - " New version of this page has just released.
    Reload to see the modified content.": "E' stata rilasciata una nuova versione di questa pagina
    Ricaricare per vedere il contenuto modificato!", - "This site requests permission:": "Questo sito richiede permessi:", - "_(Accept)": "Concedere" - -} diff --git a/src/Translate/languages/jp.json b/src/Translate/languages/jp.json deleted file mode 100644 index ff10aee4..00000000 --- a/src/Translate/languages/jp.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "おめでとうございぞす。ポãƒŧト {0} が開きぞした。これでZeroNetネットワãƒŧã‚¯ãŽãƒĄãƒŗãƒãƒŧです。", - "Tor mode active, every connection using Onion route.": "Torãƒĸãƒŧドがã‚ĸã‚¯ãƒ†ã‚Ŗãƒ–ã§ã™ã€å…¨ãĻぎæŽĨįļšã¯OnionãƒĢãƒŧトをäŊŋį”¨ã—ãžã™ã€‚", - "Successfully started Tor onion hidden services.": "Tor onionã‚ĩãƒŧãƒ“ã‚šã‚’æ­Ŗå¸¸ãĢ開始しぞした。", - "Unable to start hidden services, please check your config.": "非表į¤ēぎã‚ĩãƒŧãƒ“ã‚šã‚’é–‹å§‹ã§ããžã›ã‚“ã€‚č¨­åŽšã‚’įĸēčĒã—ãĻください。", - "For faster connections open {0} port on your router.": "æŽĨįļšã‚’éĢ˜é€ŸåŒ–ã™ã‚‹ãĢはãƒĢãƒŧã‚ŋãƒŧぎポãƒŧト {0} を開けãĻください。", - "Your connection is restricted. Please, open {0} port on your router": "æŽĨįļšãŒåˆļ限されãĻいぞす。ãƒĢãƒŧã‚ŋãƒŧぎポãƒŧト {0} を開けãĻください。", - "or configure Tor to become a full member of the ZeroNet network.": "ぞたは、TorをZeroNetネットワãƒŧã‚¯ãŽãƒĄãƒŗãƒãƒŧãĢãĒるようãĢč¨­åŽšã—ãĻください。", - - "Select account you want to use in this site:": "こぎã‚ĩイトでäŊŋį”¨ã™ã‚‹ã‚ĸã‚Ģã‚Ļãƒŗãƒˆã‚’é¸æŠž:", - "No certificate": "č¨ŧ明書がありぞせん", - "currently selected": "įžåœ¨é¸æŠžä¸­", - "Unique to site": "ã‚ĩイトå›ē有", - - "Content signing failed": "ã‚ŗãƒŗãƒ†ãƒŗãƒ„ãŽįŊ˛åãĢå¤ąæ•—", - "Content publish queued for {0:.0f} seconds.": "ã‚ŗãƒŗãƒ†ãƒŗãƒ„ãŽå…Ŧ開は{0:.0f}į§’ãŽã‚­ãƒĨãƒŧãĢå…Ĩれられぞした。", - "Content published to {0}/{1} peers.": "ã‚ĩイトぎ更新を通įŸĨ済 {0}/{1} ピã‚ĸ", - "Content published to {0} peers.": "{0}ピã‚ĸãĢå…Ŧé–‹ã•ã‚ŒãŸã‚ŗãƒŗãƒ†ãƒŗãƒ„ã€‚", - "No peers found, but your content is ready to access.": "ピã‚ĸはčĻ‹ã¤ã‹ã‚Šãžã›ã‚“ã§ã—ãŸãŒã€ã‚ŗãƒŗãƒ†ãƒŗãƒ„ãĢã‚ĸクã‚ģ゚するæē–備ができぞした。", - "Your network connection is restricted. 
Please, open {0} port": "ネットワãƒŧクæŽĨįļšãŒåˆļ限されãĻいぞす。ポãƒŧト {0} を開いãĻ、", - "on your router to make your site accessible for everyone.": "čĒ°ã§ã‚‚ã‚ĩイトãĢã‚ĸクã‚ģ゚できるようãĢしãĻください。", - "Content publish failed.": "ã‚ŗãƒŗãƒ†ãƒŗãƒ„ãŽå…Ŧ開ãĢå¤ąæ•—ã—ãžã—ãŸã€‚", - "This file still in sync, if you write it now, then the previous content may be lost.": "ã“ãŽãƒ•ã‚Ąã‚¤ãƒĢはぞだ同期しãĻいぞす。äģŠã™ãæ›¸ãčžŧã‚€ã¨ã€å‰ãŽã‚ŗãƒŗãƒ†ãƒŗãƒ„ãŒå¤ąã‚ã‚Œã‚‹å¯čƒŊ性がありぞす。", - "Write content anyway": "とãĢã‹ãã‚ŗãƒŗãƒ†ãƒŗãƒ„ã‚’æ›¸ã", - "New certificate added:": "新しいč¨ŧ明書がčŋŊ加されぞした:", - "You current certificate:": "įžåœ¨ãŽč¨ŧ明書:", - "Change it to {auth_type}/{auth_user_name}@{domain}": "{auth_type}/{auth_user_name}@{domain} ãĢ変更", - "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "変更垌ぎč¨ŧ明書: {auth_type}/{auth_user_name}@{domain}", - "Site cloned": "複čŖŊされたã‚ĩイト", - - "You have successfully changed the web interface's language!": "Webã‚¤ãƒŗã‚ŋãƒŧフェãƒŧã‚šãŽč¨€čĒžãŒæ­Ŗå¸¸ãĢ変更されぞしたīŧ", - "Due to the browser's caching, the full transformation could take some minute.": "ブナã‚Ļã‚ļãŽã‚­ãƒŖãƒƒã‚ˇãƒĨãĢより、厌全ãĒ変換ãĢは数分かかる場合がありぞす。", - - "Connection with UiServer Websocket was lost. 
Reconnecting...": "UiServer WebsocketとぎæŽĨįļšãŒå¤ąã‚ã‚Œãžã—た。再æŽĨįļšã—ãĻいぞす...", - "Connection with UiServer Websocket recovered.": "UiServer WebsocketとぎæŽĨįļšãŒå›žåžŠã—ぞした。", - "UiServer Websocket error, please reload the page.": "UiServer Websocketエナãƒŧ、ペãƒŧジをãƒĒロãƒŧドしãĻください。", - "   Connecting...": "   æŽĨįļšã—ãĻいぞす...", - "Site size: ": "ã‚ĩイトã‚ĩイã‚ē: ", - "MB is larger than default allowed ": "MBはデフりãƒĢãƒˆãŽč¨ąåŽšå€¤ã‚ˆã‚Šã‚‚å¤§ãã„ã§ã™ã€‚ ", - "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "ã‚ĩイトを開き、ã‚ĩイã‚ēåˆļ限を \" + site_info.next_size_limit + \"MB ãĢč¨­åŽš", - " files needs to be downloaded": " ãƒ•ã‚Ąã‚¤ãƒĢをダã‚Ļãƒŗãƒ­ãƒŧドするåŋ…čĻãŒã‚ã‚Šãžã™", - " downloaded": " ダã‚Ļãƒŗãƒ­ãƒŧド", - " download failed": " ダã‚Ļãƒŗãƒ­ãƒŧãƒ‰å¤ąæ•—", - "Peers found: ": "ピã‚ĸがčĻ‹ã¤ã‹ã‚Šãžã—ãŸ: ", - "No peers found": "ピã‚ĸがčĻ‹ã¤ã‹ã‚Šãžã›ã‚“", - "Running out of size limit (": "ã‚ĩイã‚ēåˆļ限をäŊŋい果たしぞした (", - "Set limit to \" + site_info.next_size_limit + \"MB": "åˆļ限を \" + site_info.next_size_limit + \"MB ãĢč¨­åŽš", - "Cloning site...": "ã‚ĩã‚¤ãƒˆã‚’č¤‡čŖŊ中â€Ļ", - "Site size limit changed to {0}MB": "ã‚ĩイトぎã‚ĩイã‚ēåˆļ限が {0}MB ãĢ変更されぞした", - " New version of this page has just released.
    Reload to see the modified content.": " こぎペãƒŧジぎ新しいバãƒŧã‚¸ãƒ§ãƒŗãŒå…Ŧ開されぞした。
    å¤‰æ›´ã•ã‚ŒãŸã‚ŗãƒŗãƒ†ãƒŗãƒ„ã‚’čĻ‹ã‚‹ãĢは再čĒ­ãŋčžŧãŋしãĻください。", - "This site requests permission:": "こぎã‚ĩイトは樊限をčĻæą‚ã—ãĻいぞす:", - "_(Accept)": "_(č¨ąå¯)", - - "Save": "äŋå­˜", - "Trackers announcing": "トナッã‚ĢãƒŧをおįŸĨらせ", - "Error": "エナãƒŧ", - "Done": "厌äē†", - "Tracker connection error detected.": "トナッã‚ĢãƒŧæŽĨįļšã‚¨ãƒŠãƒŧが検å‡ēされぞした。", - - "Update ZeroNet client to latest version?": "ZeroNetクナイã‚ĸãƒŗãƒˆã‚’æœ€æ–°į‰ˆãĢ更新しぞすかīŧŸ", - "Update": "更新", - "Restart ZeroNet client?": "ZeroNetクナイã‚ĸãƒŗãƒˆã‚’å†čĩˇå‹•しぞすかīŧŸ", - "Restart": "再čĩˇå‹•", - "Shut down ZeroNet client?": "ZeroNetクナイã‚ĸãƒŗãƒˆã‚’įĩ‚äē†ã—ぞすかīŧŸ", - "Shut down": "įĩ‚äē†" -} diff --git a/src/Translate/languages/nl.json b/src/Translate/languages/nl.json deleted file mode 100644 index 985cce7a..00000000 --- a/src/Translate/languages/nl.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "Gefeliciteerd, je poort {0} is geopend.
    Je bent een volledig lid van het ZeroNet netwerk!", - "Tor mode active, every connection using Onion route.": "Tor modus actief, elke verbinding gebruikt een Onion route.", - "Successfully started Tor onion hidden services.": "Tor onion verborgen diensten zijn met succes gestart.", - "Unable to start hidden services, please check your config.": "Het was niet mogelijk om verborgen diensten te starten, controleer je configuratie.", - "For faster connections open {0} port on your router.": "Voor snellere verbindingen open je de poort {0} op je router.", - "Your connection is restricted. Please, open {0} port on your router": "Je verbinding is beperkt. Open altjeblieft poort {0} op je router", - "or configure Tor to become a full member of the ZeroNet network.": "of configureer Tor om een volledig lid van het ZeroNet netwerk te worden.", - - "Select account you want to use in this site:": "Selecteer het account die je wilt gebruiken binnen deze site:", - "currently selected": "huidige selectie", - "Unique to site": "Uniek voor deze site", - - "Content signing failed": "Inhoud ondertekenen mislukt", - "Content publish queued for {0:.0f} seconds.": "Publiceren van inhoud staat in de wachtrij voor {0:.0f} seconden.", - "Content published to {0} peers.": "Inhoud is gepubliceerd naar {0} peers", - "No peers found, but your content is ready to access.": "Geen peers gevonden, maar je inhoud is klaar voor toegang.", - "Your network connection is restricted. Please, open {0} port": "Je netwerkverbinding is beperkt. 
Open alsjeblieft poort {0}", - "on your router to make your site accessible for everyone.": "op je router om je site toegankelijk te maken voor iedereen.", - "Content publish failed.": "Inhoud publicatie mislukt.", - "This file still in sync, if you write it now, then the previous content may be lost.": "Dit bestand is nog in sync, als je het nu overschrijft, dan is mogelijk de vorige inhoud verloren.", - "Write content anyway": "Inhoud toch schrijven", - "New certificate added:": "Nieuw certificaat toegevoegd:", - "You current certificate:": "Je huidige certificaat:", - "Change it to {auth_type}/{auth_user_name}@{domain}": "Verander het naar {auth_type}/{auth_user_name}@{domain}", - "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Certificaat veranderd naar: {auth_type}/{auth_user_name}@{domain}.", - "Site cloned": "Site gecloned", - - "You have successfully changed the web interface's language!": "Je hebt met succes de taal van de web interface aangepast!", - "Due to the browser's caching, the full transformation could take some minute.": "Door caching van je browser kan de volledige transformatie enkele minuten duren.", - - "Connection with UiServer Websocket was lost. Reconnecting...": "Verbinding met UiServer Websocket verbroken. 
Opnieuw verbinden...", - "Connection with UiServer Websocket recovered.": "Verbinding met UiServer Websocket hersteld.", - "UiServer Websocket error, please reload the page.": "UiServer Websocket fout, herlaad alsjeblieft de pagina.", - "   Connecting...": "   Verbinden...", - "Site size: ": "Site grootte ", - "MB is larger than default allowed ": "MB is groter dan de standaard toegestaan ", - "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Open de site en stel de limeit op de grootte in op \" + site_info.next_size_limit + \"MB", - " files needs to be downloaded": " bestanden moeten worden gedownload", - " downloaded": " gedownload", - " download failed": " download mislukt", - "Peers found: ": "Peers gevonden: ", - "No peers found": "Geen peers gevonden", - "Running out of size limit (": "Limeit op grootte bereikt (", - "Set limit to \" + site_info.next_size_limit + \"MB": "Stel limiet in op \" + site_info.next_size_limit + \"MB", - "Site size limit changed to {0}MB": "Site limiet op grootte is veranderd naar {0}MB", - " New version of this page has just released.
    Reload to see the modified content.": " Een nieuwe versie van deze pagina is zojuist uitgekomen.
    Herlaad de pagina om de bijgewerkte inhoud te zien.", - "This site requests permission:": "Deze site vraagt om permissie:", - "_(Accept)": "Toekennen" - -} diff --git a/src/Translate/languages/pl.json b/src/Translate/languages/pl.json deleted file mode 100644 index 679e909d..00000000 --- a/src/Translate/languages/pl.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "Gratulacje, twÃŗj port {0} jest otwarty.
    Jesteś pełnoprawnym uÅŧytkownikiem sieci ZeroNet!", - "Tor mode active, every connection using Onion route.": "Tryb Tor aktywny, kaÅŧde połączenie przy uÅŧyciu trasy Cebulowej.", - "Successfully started Tor onion hidden services.": "Pomyślnie zainicjowano ukryte usługi cebulowe Tor.", - "Unable to start hidden services, please check your config.": "Niezdolny do uruchomienia ukrytych usług, proszę sprawdÅē swoją konfigurację.", - "For faster connections open {0} port on your router.": "Dla szybszego połączenia otwÃŗrz {0} port w swoim routerze.", - "Your connection is restricted. Please, open {0} port on your router": "Połączenie jest ograniczone. Proszę, otwÃŗrz port {0} w swoim routerze", - "or configure Tor to become a full member of the ZeroNet network.": "bądÅē skonfiguruj Tora by stać się pełnoprawnym uÅŧytkownikiem sieci ZeroNet.", - - "Select account you want to use in this site:": "Wybierz konto ktÃŗrego chcesz uÅŧyć na tej stronie:", - "currently selected": "aktualnie wybrany", - "Unique to site": "Unikatowy dla strony", - - "Content signing failed": "Podpisanie treści zawiodło", - "Content publish queued for {0:.0f} seconds.": "Publikacja treści wstrzymana na {0:.0f} sekund(y).", - "Content published to {0} peers.": "Treść opublikowana do {0} uzytkownikÃŗw.", - "No peers found, but your content is ready to access.": "Nie odnaleziono uÅŧytkownikÃŗw, ale twoja treść jest dostępna.", - "Your network connection is restricted. Please, open {0} port": "Twoje połączenie sieciowe jest ograniczone. 
Proszę, otwÃŗrz port {0}", - "on your router to make your site accessible for everyone.": "w swoim routerze, by twoja strona mogłabyć dostępna dla wszystkich.", - "Content publish failed.": "Publikacja treści zawiodła.", - "This file still in sync, if you write it now, then the previous content may be lost.": "Ten plik wciąÅŧ się synchronizuje, jeśli zapiszesz go teraz, poprzednia treść moÅŧe zostać utracona.", - "Write content anyway": "Zapisz treść mimo wszystko", - "New certificate added:": "Nowy certyfikat dodany:", - "You current certificate:": "TwÃŗj aktualny certyfikat: ", - "Change it to {auth_type}/{auth_user_name}@{domain}": "Zmień na {auth_type}/{auth_user_name}@{domain}-ra", - "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Certyfikat zmieniony na {auth_type}/{auth_user_name}@{domain}-ra.", - "Site cloned": "Strona sklonowana", - - "You have successfully changed the web interface's language!": "Pomyślnie zmieniono język interfejsu stron!", - "Due to the browser's caching, the full transformation could take some minute.": "Ze względu na buforowanie przeglądarki, pełna zmiana moÅŧe zająć parę minutę.", - - "Connection with UiServer Websocket was lost. Reconnecting...": "Połączenie z UiServer Websocket zostało przerwane. 
Ponowne łączenie...", - "Connection with UiServer Websocket recovered.": "Połączenie z UiServer Websocket przywrÃŗcone.", - "UiServer Websocket error, please reload the page.": "Błąd UiServer Websocket, prosze odświeÅŧyć stronę.", - "   Connecting...": "   ÅÄ…czenie...", - "Site size: ": "Rozmiar strony: ", - "MB is larger than default allowed ": "MB jest większy niÅŧ domyślnie dozwolony ", - "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "OtwÃŗrz stronę i ustaw limit na \" + site_info.next_size_limit + \"MBÃŗw", - " files needs to be downloaded": " pliki muszą zostać ściągnięte", - " downloaded": " ściągnięte", - " download failed": " ściąganie nie powiodło się", - "Peers found: ": "Odnaleziono uÅŧytkownikÃŗw: ", - "No peers found": "Nie odnaleziono uÅŧytkownikÃŗw", - "Running out of size limit (": "Limit rozmiaru na wyczerpaniu (", - "Set limit to \" + site_info.next_size_limit + \"MB": "Ustaw limit na \" + site_info.next_size_limit + \"MBÃŗw", - "Site size limit changed to {0}MB": "Rozmiar limitu strony zmieniony na {0}MBÃŗw", - " New version of this page has just released.
    Reload to see the modified content.": "Nowa wersja tej strony właśnie została wydana.
    OdświeÅŧ by zobaczyć nową, zmodyfikowaną treść strony.", - "This site requests permission:": "Ta strona wymaga uprawnień:", - "_(Accept)": "Przyznaj uprawnienia", - - "Sign and publish": "Podpisz i opublikuj", - "Restart ZeroNet client?": "Uruchomić ponownie klienta ZeroNet?", - "Restart": "Uruchom ponownie" -} diff --git a/src/Translate/languages/pt-br.json b/src/Translate/languages/pt-br.json deleted file mode 100644 index a842684f..00000000 --- a/src/Translate/languages/pt-br.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "ParabÊns, a porta{0} estÃĄ aberta.
    VocÃĒ ÃŠ um membro completo da rede ZeroNet!", - "Tor mode active, every connection using Onion route.": "Modo Tor ativado, todas as conexÃĩes usam a rota Onion.", - "Successfully started Tor onion hidden services.": "Os serviços ocultos Tor onion foram inciados com sucesso.", - "Unable to start hidden services, please check your config.": "NÃŖo foi possível iniciar os serviços ocultos, por favor verifique suas configuraçÃĩes.", - "For faster connections open {0} port on your router.": "Para conexÃĩes mais rÃĄpidas, abra a porta {0} em seu roteador.", - "Your connection is restricted. Please, open {0} port on your router": "Sua conexÃŖo estÃĄ restrita. Por favor, abra a porta {0} em seu roteador", - "or configure Tor to become a full member of the ZeroNet network.": "ou configure o Tor para se tornar um membro completo da rede ZeroNet.", - - "Select account you want to use in this site:": "Selecione a conta que deseja usar nesse site:", - "currently selected": "atualmente selecionada", - "Unique to site": "Única para o site", - - "Content signing failed": "Assinatura de conteÃēdo falhou", - "Content publish queued for {0:.0f} seconds.": "PublicaÃ§ÃŖo de conteÃēdo na fila por {0:.0f} segundos.", - "Content published to {0} peers.": "ConteÃēdo publicado para {0} peers.", - "No peers found, but your content is ready to access.": "Nenhum peer encontrado, mas seu conteÃēdo estÃĄ pronto para ser acessado.", - "Your network connection is restricted. Please, open {0} port": "Sua conexÃŖo de rede estÃĄ restrita. 
Por favor, abra a porta {0}", - "on your router to make your site accessible for everyone.": "em seu roteador para tornar seu site acessível para todos.", - "Content publish failed.": "PublicaÃ§ÃŖo de conteÃēdo falhou.", - "This file still in sync, if you write it now, then the previous content may be lost.": "Esse arquivo ainda estÃĄ sincronizado, se escreve-lo agora o conteÃēdo anterior poderÃĄ ser perdido.", - "Write content anyway": "Escrever o conteÃēdo mesmo assim", - "New certificate added:": "Novo certificado adicionado:", - "You current certificate:": "Seu certificado atual:", - "Change it to {auth_type}/{auth_user_name}@{domain}": "Alterar para {auth_type}/{auth_user_name}@{domain}", - "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Certificado alterado para: {auth_type}/{auth_user_name}@{domain}.", - "Site cloned": "Site clonado", - - "You have successfully changed the web interface's language!": "VocÃĒ alterou o idioma da interface web com sucesso!", - "Due to the browser's caching, the full transformation could take some minute.": "Devido ao cache do navegador, a transformaÃ§ÃŖo completa pode levar alguns minutos.", - - "Connection with UiServer Websocket was lost. Reconnecting...": "A conexÃŖo com UiServer Websocket foi perdida. 
Reconectando...", - "Connection with UiServer Websocket recovered.": "ConexÃŖo com UiServer Websocket recuperada.", - "UiServer Websocket error, please reload the page.": "Erro de UiServer Websocket, por favor atualize a pÃĄgina.", - "   Connecting...": "   Conectando...", - "Site size: ": "Tamanho do site: ", - "MB is larger than default allowed ": "MB Ê maior do que o tamanho permitido por padrÃŖo", - "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Abrir site e definir limite de tamanho para \" + site_info.next_size_limit + \"MBs", - " files needs to be downloaded": " os arquivos precisam ser baixados", - " downloaded": " baixados", - " download failed": " falha no download", - "Peers found: ": "Peers encontrados: ", - "No peers found": "Nenhum peer encontrado", - "Running out of size limit (": "Passando do tamanho limite (", - "Set limit to \" + site_info.next_size_limit + \"MB": "Definir limite para \" + site_info.next_size_limit + \"MB", - "Site size limit changed to {0}MB": "Limite de tamanho do site alterado para {0}MBs", - " New version of this page has just released.
    Reload to see the modified content.": " Uma nova versÃŖo desse site acaba de ser publicada.
    Atualize para ver o conteÃēdo modificado.", - "This site requests permission:": "Esse site solicita permissÃŖo:", - "_(Accept)": "Conceder", - - "Save": "Salvar", - "Trackers announcing": "Trackers anunciando", - "Error": "Erro", - "Done": "Concluído", - "Tracker connection error detected.": "Erro de conexÃŖo com tracker foi detectado." - -} diff --git a/src/Translate/languages/ru.json b/src/Translate/languages/ru.json deleted file mode 100644 index 96c84b91..00000000 --- a/src/Translate/languages/ru.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "ĐŸĐžĐˇĐ´Ņ€Đ°Đ˛ĐģŅĐĩĐŧ, Đ˛Đ°Ņˆ ĐŋĐžŅ€Ņ‚ {0} ĐžŅ‚ĐēҀҋ҂.
    Đ’Ņ‹ ĐŋĐžĐģĐŊĐžŅ†ĐĩĐŊĐŊŅ‹Đš ŅƒŅ‡Đ°ŅŅ‚ĐŊиĐē ҁĐĩŅ‚Đ¸ ZeroNet!", - "Tor mode active, every connection using Onion route.": "Đ ĐĩĐļиĐŧ Tor вĐēĐģŅŽŅ‡ĐĩĐŊ, Đ˛ŅĐĩ ŅĐžĐĩдиĐŊĐĩĐŊĐ¸Ņ ĐžŅŅƒŅ‰ĐĩŅŅ‚Đ˛ĐģŅŅŽŅ‚ŅŅ ҇ĐĩŅ€ĐĩС Tor.", - "Successfully started Tor onion hidden services.": "ĐĄĐēŅ€Ņ‹Ņ‚Ņ‹Đš ҁĐĩŅ€Đ˛Đ¸Ņ Tor СаĐŋŅƒŅ‰ĐĩĐŊĐž ҃ҁĐŋĐĩ҈ĐŊĐž.", - "Unable to start hidden services, please check your config.": "ĐžŅˆĐ¸ĐąĐēа ĐŋŅ€Đ¸ СаĐŋ҃ҁĐēĐĩ ҁĐēŅ€Ņ‹Ņ‚ĐžĐŗĐž ҁĐĩŅ€Đ˛Đ¸ŅĐ°, ĐŋĐžĐļаĐģŅƒĐšŅŅ‚Đ° ĐŋŅ€ĐžĐ˛ĐĩŅ€ŅŒŅ‚Đĩ ĐŊĐ°ŅŅ‚Ņ€ĐžĐšĐēи", - "For faster connections open {0} port on your router.": "ДĐģŅ йОĐģĐĩĐĩ ĐąŅ‹ŅŅ‚Ņ€ĐžĐš Ņ€Đ°ĐąĐžŅ‚Ņ‹ ҁĐĩŅ‚Đ¸ ĐžŅ‚ĐēŅ€ĐžĐšŅ‚Đĩ {0} ĐŋĐžŅ€Ņ‚ ĐŊа Đ˛Đ°ŅˆĐĩĐŧ Ņ€ĐžŅƒŅ‚ĐĩŅ€Đĩ.", - "Your connection is restricted. Please, open {0} port on your router": "ПодĐēĐģŅŽŅ‡ĐĩĐŊиĐĩ ĐžĐŗŅ€Đ°ĐŊĐ¸Ņ‡ĐĩĐŊĐž. ПоĐļаĐģŅƒĐšŅŅ‚Đ° ĐžŅ‚ĐēŅ€ĐžĐšŅ‚Đĩ {0} ĐŋĐžŅ€Ņ‚ ĐŊа Đ˛Đ°ŅˆĐĩĐŧ Ņ€ĐžŅƒŅ‚ĐĩŅ€Đĩ", - "or configure Tor to become a full member of the ZeroNet network.": "иĐģи ĐŊĐ°ŅŅ‚Ņ€ĐžĐšŅ‚Đĩ Tor Ņ‡Ņ‚Đž ĐąŅ‹ ŅŅ‚Đ°Ņ‚ŅŒ ĐŋĐžĐģĐŊĐžŅ†ĐĩĐŊĐŊŅ‹Đŧ ŅƒŅ‡Đ°ŅŅ‚ĐŊиĐēĐžĐŧ ҁĐĩŅ‚Đ¸ ZeroNet.", - - "Select account you want to use in this site:": "Đ’Ņ‹ĐąĐĩŅ€Đ¸Ņ‚Đĩ аĐēĐēĐ°ŅƒĐŊŅ‚ Đ´ĐģŅ Đ¸ŅĐŋĐžĐģŅŒĐˇĐžĐ˛Đ°ĐŊĐ¸Ņ ĐŊа ŅŅ‚ĐžĐŧ ŅĐ°ĐšŅ‚Đĩ:", - "currently selected": "ҁĐĩĐšŅ‡Đ°Ņ Đ˛Ņ‹ĐąŅ€Đ°ĐŊ", - "Unique to site": "ĐŖĐŊиĐēаĐģҌĐŊŅ‹Đš Đ´ĐģŅ ŅŅ‚ĐžĐŗĐž ŅĐ°ĐšŅ‚Đ°", - - "Content signing failed": "ПодĐŋĐ¸ŅŅŒ ĐēĐžĐŊŅ‚ĐĩĐŊŅ‚Đ° ĐŊĐĩ ŅƒĐ´Đ°ĐģĐ°ŅŅŒ", - "Content publish queued for {0:.0f} seconds.": "ĐŸŅƒĐąĐģиĐēĐ°Ņ†Đ¸Ņ ĐēĐžĐŊŅ‚ĐĩĐŊŅ‚Đ° ĐŋĐžŅŅ‚Đ°Đ˛ĐģĐĩĐŊа в ĐžŅ‡ĐĩŅ€ĐĩĐ´ŅŒ {0:.0f} ҁĐĩĐē҃ĐŊĐ´.", - "Content published to {0} peers.": "КоĐŊŅ‚ĐĩĐŊŅ‚ ĐžĐŋŅƒĐąĐģиĐēОваĐŊ ĐŊа {0} ĐŋĐ¸Ņ€Đ°Ņ….", - "No peers found, but your content is ready to access.": "ĐŸĐ¸Ņ€Ņ‹ ĐŊĐĩ ĐŊаКдĐĩĐŊŅ‹, ĐŊĐž Đ˛Đ°Ņˆ ĐēĐžĐŊŅ‚ĐĩĐŊŅ‚ Đ´ĐžŅŅ‚ŅƒĐŋĐĩĐŊ.", - "Your network connection is restricted. Please, open {0} port": "Đ’Đ°ŅˆĐĩ ĐŋОдĐēĐģŅŽŅ‡ĐĩĐŊиĐĩ ĐžĐŗŅ€Đ°ĐŊĐ¸Ņ‡ĐĩĐŊĐž. ПоĐļаĐģŅƒĐšŅŅ‚Đ° ĐžŅ‚ĐēŅ€ĐžĐšŅ‚Đĩ {0} ĐŋĐžŅ€Ņ‚. 
", - "on your router to make your site accessible for everyone.": "ĐŊа Đ˛Đ°ŅˆĐĩĐŧ Ņ€ĐžŅƒŅ‚ĐĩŅ€Đĩ, Ņ‡Ņ‚Đž ĐąŅ‹ Đ˛Đ°Ņˆ ŅĐ°ĐšŅ‚ ŅŅ‚Đ°Đģ Đ´ĐžŅŅ‚ŅƒĐŋĐŊĐŗ ĐŋĐžŅĐĩŅ‚Đ¸Ņ‚ĐĩĐģŅĐŧ.", - "Content publish failed.": "ĐžŅˆĐ¸ĐąĐēа ĐŋŅ€Đ¸ ĐŋŅƒĐąĐģиĐēĐ°Ņ†Đ¸Đ¸ ĐēĐžĐŊŅ‚ĐĩĐŊŅ‚Đ°.", - "This file still in sync, if you write it now, then the previous content may be lost.": "Đ­Ņ‚ĐžŅ‚ Ņ„Đ°ĐšĐģ Đ˛ŅŅ‘ Đĩ҉Đĩ ŅĐ¸ĐŊŅ…Ņ€ĐžĐŊĐ¸ĐˇĐ¸Ņ€ŅƒĐĩŅ‚ŅŅ, ĐĩҁĐģи ĐŋŅ€ĐžĐ´ĐžĐģĐļĐ¸Ņ‚ŅŒ ĐĩĐŗĐž иСĐŧĐĩĐŊĐĩĐŊиĐĩ, ĐŋŅ€ĐĩĐ´Ņ‹Đ´ŅƒŅ‰Đ¸Đš ĐēĐžĐŊŅ‚ĐĩĐŊŅ‚ ĐŧĐžĐļĐĩŅ‚ ĐąŅ‹Ņ‚ŅŒ ĐŋĐžŅ‚ĐĩŅ€ŅĐŊ.", - "Write content anyway": "ЗаĐŋĐ¸ŅĐ°Ņ‚ŅŒ ĐēĐžĐŊŅ‚ĐĩĐŊŅ‚ в ĐģŅŽĐąĐžĐŧ ҁĐģŅƒŅ‡Đ°Đĩ", - "New certificate added:": "ДобавĐģĐĩĐŊ ĐŊĐžĐ˛Ņ‹Đš ҁĐĩŅ€Ņ‚Đ¸Ņ„Đ¸ĐēĐ°Ņ‚:", - "You current certificate:": "Đ’Đ°Ņˆ Ņ‚ĐĩĐēŅƒŅ‰Đ¸Đš ҁĐĩŅ€Ņ‚Đ¸Ņ„Đ¸ĐēĐ°Ņ‚: ", - "Change it to {auth_type}/{auth_user_name}@{domain}": "ИСĐŧĐĩĐŊĐ¸Ņ‚ŅŒ ĐĩĐŗĐž ĐŊа {auth_type}/{auth_user_name}@{domain}", - "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "ĐĄĐĩŅ€Ņ‚Đ¸Ņ„Đ¸ĐēĐ°Ņ‚ иСĐŧĐĩĐŊĐĩĐŊ ĐŊа: {auth_type}/{auth_user_name}@{domain}.", - "Site cloned": "ĐĄĐ°ĐšŅ‚ ҁĐēĐģĐžĐŊĐ¸Ņ€ĐžĐ˛Đ°ĐŊ", - - "You have successfully changed the web interface's language!": "Đ¯ĐˇŅ‹Đē иĐŊŅ‚ĐĩҀ҄ĐĩĐšŅĐ° ҃ҁĐŋĐĩ҈ĐŊĐž иСĐŧĐĩĐŊĐĩĐŊ!", - "Due to the browser's caching, the full transformation could take some minute.": "В ĐˇĐ°Đ˛Đ¸ŅĐ¸ĐŧĐžŅŅ‚Đ¸ ĐžŅ‚ Ņ€Đ°ĐąĐžŅ‚Ņ‹ Đ˛Đ°ŅˆĐĩĐŗĐž ĐąŅ€Đ°ŅƒĐˇĐĩŅ€Đ° ĐŋĐžĐģĐŊĐžĐĩ ĐŋŅ€ĐĩĐžĐąŅ€Đ°ĐˇĐžĐ˛Đ°ĐŊиĐĩ ĐŧĐžĐļĐĩŅ‚ СаĐŊŅŅ‚ŅŒ ĐŋĐ°Ņ€Ņƒ ĐŧиĐŊŅƒŅ‚.", - - "Connection with UiServer Websocket was lost. Reconnecting...": "ПодĐēĐģŅŽŅ‡ĐĩĐŊиĐĩ Đē UiServer Websocket ĐŋŅ€ĐĩŅ€Đ˛Đ°ĐŊĐž. 
ПĐĩŅ€ĐĩĐŋОдĐēĐģŅŽŅ‡Đ°ŅŽŅŅŒ...", - "Connection with UiServer Websocket recovered.": "ПодĐēĐģŅŽŅ‡ĐĩĐŊиĐĩ Đē UiServer Websocket Đ˛ĐžŅŅŅ‚Đ°ĐŊОвĐģĐĩĐŊĐž.", - "UiServer Websocket error, please reload the page.": "ĐžŅˆĐ¸ĐąĐēа UiServer Websocket, ĐŋĐĩŅ€ĐĩĐˇĐ°ĐŗŅ€ŅƒĐˇĐ¸Ņ‚Đĩ ŅŅ‚Ņ€Đ°ĐŊĐ¸Ņ†Ņƒ!", - "   Connecting...": "   ĐŸĐžĐ´ĐēĐģŅŽŅ‡ĐĩĐŊиĐĩ...", - "Site size: ": "РаСĐŧĐĩŅ€ ŅĐ°ĐšŅ‚Đ°: ", - "MB is larger than default allowed ": "MB йОĐģҌ҈Đĩ ҇ĐĩĐŧ Ņ€Đ°ĐˇŅ€Đĩ҈ĐĩĐŊĐž ĐŋĐž ҃ĐŧĐžĐģŅ‡Đ°ĐŊĐ¸ŅŽ ", - "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "ĐžŅ‚ĐēŅ€Ņ‹Ņ‚ŅŒ ŅĐ°ĐšŅ‚ и ŅƒŅŅ‚Đ°ĐŊĐžĐ˛Đ¸Ņ‚ŅŒ ĐģиĐŧĐ¸Ņ‚ СаĐŊиĐŧаĐĩĐŧĐžĐŗĐž ĐŧĐĩŅŅ‚Đ° ĐŊа \" + site_info.next_size_limit + \"MB", - " files needs to be downloaded": " Ņ„Đ°ĐšĐģŅ‹ Đ´ĐžĐģĐļĐŊŅ‹ ĐąŅ‹Ņ‚ŅŒ ĐˇĐ°ĐŗŅ€ŅƒĐļĐĩĐŊŅ‹", - " downloaded": " ĐˇĐ°ĐŗŅ€ŅƒĐļĐĩĐŊĐž", - " download failed": " ĐžŅˆĐ¸ĐąĐēа ĐˇĐ°ĐŗŅ€ŅƒĐˇĐēи", - "Peers found: ": "ĐŸĐ¸Ņ€ĐžĐ˛ ĐŊаКдĐĩĐŊĐž: ", - "No peers found": "ĐŸĐ¸Ņ€Ņ‹ ĐŊĐĩ ĐŊаКдĐĩĐŊŅ‹", - "Running out of size limit (": "Đ”ĐžŅŅ‚ŅƒĐŋĐŊĐžĐĩ ĐŧĐĩŅŅ‚Đž СаĐēĐžĐŊŅ‡Đ¸ĐģĐžŅŅŒ (", - "Set limit to \" + site_info.next_size_limit + \"MB": "ĐŖŅŅ‚Đ°ĐŊĐžĐ˛Đ¸Ņ‚ŅŒ ĐģиĐŧĐ¸Ņ‚ ĐŊа \" + site_info.next_size_limit + \"MB", - "Site size limit changed to {0}MB": "ЛиĐŧĐ¸Ņ‚ ĐŋаĐŧŅŅ‚Đ¸ ĐŊа Đ´Đ¸ŅĐēĐĩ иСĐŧĐĩĐŊĐĩĐŊ ĐŊа {0}MB", - " New version of this page has just released.
    Reload to see the modified content.": "Đ”ĐžŅŅ‚ŅƒĐŋĐŊа ĐŊĐžĐ˛Đ°Ņ вĐĩŅ€ŅĐ¸Ņ даĐŊĐŊОК ŅŅ‚Ņ€Đ°ĐŊĐ¸Ņ†Ņ‹
    ОбĐŊĐžĐ˛Đ¸Ņ‚Đĩ ŅŅ‚Ņ€Đ°ĐŊĐ¸Ņ†Ņƒ, Ņ‡Ņ‚Đž ĐąŅ‹ ŅƒĐ˛Đ¸Đ´ĐĩŅ‚ŅŒ иСĐŧĐĩĐŊĐĩĐŊĐ¸Ņ!", - "This site requests permission:": "ДаĐŊĐŊŅ‹Đš ŅĐ°ĐšŅ‚ СаĐŋŅ€Đ°ŅˆĐ¸Đ˛Đ°ĐĩŅ‚ Ņ€Đ°ĐˇŅ€Đĩ҈ĐĩĐŊĐ¸Ņ:", - "_(Accept)": "ĐŸŅ€ĐĩĐ´ĐžŅŅ‚Đ°Đ˛Đ¸Ņ‚ŅŒ" - -} diff --git a/src/Translate/languages/sk.json b/src/Translate/languages/sk.json deleted file mode 100644 index 8fb4554b..00000000 --- a/src/Translate/languages/sk.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "BlahoÅželÃĄme, vÃĄÅĄ port {0} je otvorenÃŊ.
    Ste ÃēplnÃŊm členom siete ZeroNet!", - "Tor mode active, every connection using Onion route.": "Tor mÃŗd aktívny, vÅĄetky spojenia teraz pouŞívajÃē Onion sieÅĨ.", - "Successfully started Tor onion hidden services.": "Tor ÃēspeÅĄne spustenÃŊ.", - "Unable to start hidden services, please check your config.": "Nebolo moÅžnÊ spustiÅĨ Tor, prosím skontrolujte nastavenia.", - "For faster connections open {0} port on your router.": "Pre rÃŊchlejÅĄie spojenie otvorte na vaÅĄom routery port {0}", - "Your connection is restricted. Please, open {0} port on your router": "VaÅĄe pripojenie je obmedzenÊ. Prosím otvorte port {0} na vaÅĄom routery.", - "or configure Tor to become a full member of the ZeroNet network.": "alebo nastavte Tor aby ste sa tali plnÃŊm členom siete ZeroNet.", - - "Select account you want to use in this site:": "ZvoÄžte Ãēčet ktorÃŊ chcete pouŞívaÅĨ na tejto strÃĄnke:", - "currently selected": "aktuÃĄlne zvolenÊ", - "Unique to site": "UnikÃĄtny pre strÃĄnku", - - "Content signing failed": "Podpísanie obsahu zlyhalo", - "Content publish queued for {0:.0f} seconds.": "Podpísanie obsahu bude na rade za {0:.0f} sekÃēnd", - "Content published to {0} peers.": "Obsah publikovanÃŊ {0} peer-erom", - "No peers found, but your content is ready to access.": "Neboli nÃĄjdenÃŊ Åžiadny peer-ery, ale vÃĄÅĄ obsah je pripravenÃŊ pre prístup.", - "Your network connection is restricted. Please, open {0} port": "VaÅĄe pripojenie k sieti je obmedzenÊ. 
Prosím otvorte port {0} na vaÅĄom routery.", - "on your router to make your site accessible for everyone.": "na vaÅĄom routery aby bola vaÅĄa strÃĄnka prístupnÃĄ pre vÅĄetkÃŊch.", - "Content publish failed.": "Publikovanie obsahu zlyhalo.", - "This file still in sync, if you write it now, then the previous content may be lost.": "Tento sÃēbor sa stÃĄle synchronizuje, ak v ňom spravíte zmeny, predchÃĄdzajÃēci obsah sa môŞe stratiÅĨ.", - "Write content anyway": "Aj tak spraviÅĨ zmeny", - "New certificate added:": "PridanÃŊ novÃŊ certifikÃĄt:", - "You current certificate:": "VÃĄÅĄ aktuÃĄlny certifikÃĄt:", - "Change it to {auth_type}/{auth_user_name}@{domain}": "ZvoÄžte to na {auth_type}/{auth_user_name}@{domain}", - "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "CertifikÃĄt zmenenÃŊ na: {auth_type}/{auth_user_name}@{domain}.", - "Site cloned": "StrÃĄnka naklonovanÃĄ", - - "You have successfully changed the web interface's language!": "ÚspeÅĄne ste zmenili jazyk webovÊho rozhrania!", - "Due to the browser's caching, the full transformation could take some minute.": "Kôli cachu webovÊho prehliadavača, ceÄžkovÃĄ transformÃĄcia môŞe chvíÄēu trvaÅĨ.", - - "Connection with UiServer Websocket was lost. Reconnecting...": "Spojenie s UiServer Websocket bolo stratenÊ. 
Znovu pripÃĄjame...", - "Connection with UiServer Websocket recovered.": "Spojenie s UiServer Websocket obnovenÊ.", - "UiServer Websocket error, please reload the page.": "Chyba UiServer Websocket-u, prosím znovu načítajte strÃĄnku.", - "   Connecting...": "   PripÃĄjanie...", - "Site size: ": "VeÄžkosÅĨ strÃĄnky: ", - "MB is larger than default allowed ": "MB je viac ako povolenÃĄ hodnota", - "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "OtvoriÅĨ strÃĄnku a nastaviÅĨ limit veÄžkosti na \" + site_info.next_size_limit + \"MB", - " files needs to be downloaded": " sÃēbory je potrebnÊ stiahnuÅĨ", - " downloaded": " stiahnutÊ", - " download failed": " sÅĨahovanie zlyhalo", - "Peers found: ": "Peer-erov nÃĄjdenÃŊch: ", - "No peers found": "Neboli nÃĄjdenÃŊ Åžiadny peer-ery", - "Running out of size limit (": "Presahuje povolenÃŊ limit veÄžkosti pamäte (", - "Set limit to \" + site_info.next_size_limit + \"MB": "NastaviÅĨ limit na \" + site_info.next_size_limit + \"MB ändern", - "Site size limit changed to {0}MB": "Limit veÄžkosti pamäte nastavenÃŊ na {0}MB", - " New version of this page has just released.
    Reload to see the modified content.": " Bola vydanÃĄ novÃĄ verzia tejto strÃĄnky.
    Znovu načítajte tÃēto strÃĄnku aby bolo vidieÅĨ zmeny.", - "This site requests permission:": "TÃĄto strÃĄnka vyÅžaduje povolenie:", - "_(Accept)": "UdeliÅĨ", - - "on": "", - "Oct": "Okt", - "May": "MÃĄj", - "Jun": "JÃēn", - "Jul": "JÃēl" - -} diff --git a/src/Translate/languages/sl.json b/src/Translate/languages/sl.json deleted file mode 100644 index 2aeb628e..00000000 --- a/src/Translate/languages/sl.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "Čestitke, vaÅĄa vrata {0} so odprta.
    Postali ste polnopravni član ZeroNet omreÅžja!", - "Tor mode active, every connection using Onion route.": "Način Tor aktiven.", - "Successfully started Tor onion hidden services.": "Storitve Tor uspeÅĄno zagnane.", - "Unable to start hidden services, please check your config.": "Ni bilo mogoče zagnati Tor storitev. Preverite nastavitve.", - "For faster connections open {0} port on your router.": "Za hitrejÅĄe povezave na svojem usmerjevalniku odprite vrata {0}.", - "Your connection is restricted. Please, open {0} port on your router": "VaÅĄa povezava je omejena. Na svojem usmerjevalniku odprite vrata {0}", - "or configure Tor to become a full member of the ZeroNet network.": "ali nastavite Tor, da postanete polnopravni član ZeroNet omreÅžja.", - - "Select account you want to use in this site:": "Izberite račun, ki ga Åželite uporabiti na tem spletnem mestu:", - "currently selected": "trenutno izbrano", - "Unique to site": "Edinstven za spletno mesto", - - "Content signing failed": "Podpisovanje vsebine ni uspelo", - "Content publish queued for {0:.0f} seconds.": "Objava vsebine na čakanju za {0:.0f} sekund.", - "Content published to {0} peers.": "Vsebina objavljena na {0} povezavah.", - "No peers found, but your content is ready to access.": "Ni nobenih povezav, vendar je vaÅĄa vsebina pripravljena za dostop.", - "Your network connection is restricted. Please, open {0} port": "VaÅĄa povezava je omejena. Prosimo, odprite vrata {0}", - "on your router to make your site accessible for everyone.": "na vaÅĄem usmerjevalniku, da bo vaÅĄe spletno mesto dostopno za vse.", - "Content publish failed.": "Objavljanje vsebine ni uspelo.", - "This file still in sync, if you write it now, then the previous content may be lost.": "Ta datoteka se ÅĄe vedno sinhronizira. 
Če jo uredite zdaj, se lahko zgodi, da bo prejÅĄnja vsebina izgubljena.", - "Write content anyway": "Vseeno uredi vsebino", - "New certificate added:": "Dodano novo potrdilo:", - "You current certificate:": "Trenutno potrdilo:", - "Change it to {auth_type}/{auth_user_name}@{domain}": "Spremenite ga na {auth_type}/{auth_user_name}@{domain}", - "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Potrdilo spremenjeno na: {auth_type}/{auth_user_name}@{domain}.", - "Site cloned": "Stran klonirana", - - "You have successfully changed the web interface's language!": "UspeÅĄno ste spremenili jezik spletnega vmesnika!", - "Due to the browser's caching, the full transformation could take some minute.": "Zaradi predpomnjenja brskalnika lahko popolna preobrazba traja nekaj minut.", - - "Connection with UiServer Websocket was lost. Reconnecting...": "Povezava z UiServer Websocket je bila izgubljena. Ponovno povezovanje ...", - "Connection with UiServer Websocket recovered.": "Povezava z UiServer Websocket je vzpostavljena.", - "UiServer Websocket error, please reload the page.": "Napaka UiServer Websocket. Prosimo osveÅžite stran.", - "   Connecting...": "   Povezovanje ...", - "Site size: ": "Velikost strani: ", - "MB is larger than default allowed ": "MB je večja od dovoljenih", - "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Odpri to stran in nastavi omejitev na \" + site_info.next_size_limit + \"MB", - " files needs to be downloaded": " datotek mora biti preneÅĄenih", - " downloaded": " preneseno", - " download failed": " prenos ni uspel", - "Peers found: ": "Najdene povezave: ", - "No peers found": "Ni najdenih povezav", - "Running out of size limit (": "Zmanjkuje dovoljenega prostora (", - "Set limit to \" + site_info.next_size_limit + \"MB": "Nastavi omejitev na \" + site_info.next_size_limit + \"MB", - "Site size limit changed to {0}MB": "Omejitev strani nastavljena na{0} MB", - " New version of this page has just released.
    Reload to see the modified content.": " Ravnokar je bila objavljena nova različica te strani.
    OsveÅžite jo, da boste videli novo vsebino.", - "This site requests permission:": "Ta stran zahteva dovoljenja:", - "_(Accept)": "Dovoli" - -} diff --git a/src/Translate/languages/tr.json b/src/Translate/languages/tr.json deleted file mode 100644 index 09a1bdb5..00000000 --- a/src/Translate/languages/tr.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "Tebrikler, portunuz ({0}) aÃ§Äąk.
    ArtÄąk ZeroNet ağına katÄąldÄąnÄąz!", - "Tor mode active, every connection using Onion route.": "Tor aktif, tÃŧm bağlantÄąlar Onion yÃļnlendircisini kullanÄąyor.", - "Successfully started Tor onion hidden services.": "Gizli Tor hizmetleri başlatÄąldÄą.", - "Unable to start hidden services, please check your config.": "Gizli hizmetler başlatÄąlamadÄą, lÃŧtfen ayarlarÄąnÄązÄą kontrol ediniz.", - "For faster connections open {0} port on your router.": "Daha hÄązlÄą bağlantÄą için {0} nolu portu bilgisayarÄąnÄąza yÃļnlendirin.", - "Your connection is restricted. Please, open {0} port on your router": "SÄąnÄąrlÄą bağlantÄą. LÃŧtfen, {0} nolu portu bilgisayarÄąnÄąza yÃļnlendirin", - "or configure Tor to become a full member of the ZeroNet network.": "ya da ZeroNet ağına tam olarak katÄąlabilmek için Tor'u kullanÄąn.", - - "Select account you want to use in this site:": "Bu sitede kullanmak için bir hesap seçiniz:", - "currently selected": "kullanÄąlan", - "Unique to site": "Bu site için benzersiz", - - "Content signing failed": "İçerik imzalama başarÄąsÄąz oldu", - "Content publish queued for {0:.0f} seconds.": "İçerik yayÄąmlanmak Ãŧzere {0:.0f} saniyedir kuyrukta.", - "Content published to {0} peers.": "İçerik {0} eşe dağıtÄąldÄą.", - "No peers found, but your content is ready to access.": "Eş bulunamadÄą, ama içeriğiniz erişime hazÄąr.", - "Your network connection is restricted. Please, open {0} port": "SÄąnÄąrlÄą bağlantÄą. 
LÃŧtfen, {0} nolu portu bilgisayarÄąnÄąza yÃļnlendirin", - "on your router to make your site accessible for everyone.": "bÃļylece sitenizi herkes için erişilebilir yapabilirsiniz", - "Content publish failed.": "İçerik yayÄąmlama başarÄąsÄąz oldu.", - "This file still in sync, if you write it now, then the previous content may be lost.": "Bu dosya hala gÃŧncelleniyor, eğer şimdi kaydederseniz, Ãļnceki içerik kaybolabilir.", - "Write content anyway": "Yine de kaydet", - "New certificate added:": "Yeni sertifika eklendi:", - "You current certificate:": "KullanÄąlan sertifikanÄąz:", - "Change it to {auth_type}/{auth_user_name}@{domain}": "{auth_type}/{auth_user_name}@{domain} olarak değiştir.", - "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "{auth_type}/{auth_user_name}@{domain} olarak değiştirildi", - "Site cloned": "Site klonlandÄą", - - "You have successfully changed the web interface's language!": "WEB ara yÃŧzÃŧ için dil başarÄąyla değiştirildi!", - "Due to the browser's caching, the full transformation could take some minute.": "Tam dÃļnÃŧşÃŧmÃŧn sağlanmasÄą, tarayÄącÄą Ãļnbelleklemesi yÃŧzÃŧnden zaman alabilir.", - - "Connection with UiServer Websocket was lost. Reconnecting...": "UiServer Websocket ile bağlantÄą kesildi. 
Yeniden bağlanÄąlÄąyor...", - "Connection with UiServer Websocket recovered.": "UiServer Websocket ile bağlantÄą yeniden kuruldu.", - "UiServer Websocket error, please reload the page.": "UiServer Websocket hatasÄą, lÃŧtfen sayfayÄą yenileyin.", - "   Connecting...": "   BağlanÄąyor...", - "Site size: ": "Site boyutu: ", - "MB is larger than default allowed ": "MB izin verilenden fazla ", - "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Siteyi aÃ§Äąn ve boyut sÄąnÄąrÄąnÄą \" + site_info.next_size_limit + \"MB'ye yÃŧkseltin", - " files needs to be downloaded": " indirilmesi gereken dosyalar", - " downloaded": " indirildi", - " download failed": " indirme başarÄąsÄąz", - "Peers found: ": "Bulunan eşler: ", - "No peers found": "Eş bulunamadÄą", - "Running out of size limit (": "Boyut sÄąnÄąrlamasÄąnÄą aştÄą (", - "Set limit to \" + site_info.next_size_limit + \"MB": "SÄąnÄąrlamayÄą \" + site_info.next_size_limit + \"MB'ye yÃŧkselt", - "Site size limit changed to {0}MB": "Site boyut sÄąnÄąrlamasÄą {0}MB olarak ayarlandÄą", - " New version of this page has just released.
    Reload to see the modified content.": " Bu sayfanÄąn yeni versiyonu yayÄąmlandÄą.
    Değişen içeriği gÃļrmek için yeniden yÃŧkleyiniz.", - "This site requests permission:": "Bu site bir izin istiyor:", - "_(Accept)": "İzin ver" - -} diff --git a/src/Translate/languages/zh-tw.json b/src/Translate/languages/zh-tw.json deleted file mode 100644 index 0ec071b4..00000000 --- a/src/Translate/languages/zh-tw.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "įĨčŗ€īŧŒäŊ įš„埠 ({0}) 厞į‰“開。
    äŊ åˇ˛į˜¯ ZeroNet įļ˛čˇ¯įš„æ­Ŗåŧæˆå“Ąäē†īŧ", - "Tor mode active, every connection using Onion route.": "Tor æ¨Ąåŧå•Ÿį”¨īŧŒæ¯å€‹é€ŖæŽĨæ­Ŗåœ¨äŊŋį”¨æ´‹č”Ĩčˇ¯į”ąã€‚", - "Successfully started Tor onion hidden services.": "成功啟動 Tor 洋č”Ĩéšąč—æœå‹™ã€‚", - "Unable to start hidden services, please check your config.": "į„Ąæŗ•æ‰“é–‹éšąč—æœå‹™īŧŒčĢ‹æĒĸæŸĨäŊ įš„配įŊŽã€‚", - "For faster connections open {0} port on your router.": "į‚ēä熿›´åŋĢįš„é€ŖæŽĨčĢ‹åœ¨čˇ¯į”ąå™¨ä¸Šæ‰“é–‹ {0} 埠。", - "Your connection is restricted. Please, open {0} port on your router": "äŊ įš„逪æŽĨ受限åˆļ。čĢ‹åœ¨äŊ įš„čˇ¯į”ąå™¨ä¸Šæ‰“é–‹ {0} 埠", - "or configure Tor to become a full member of the ZeroNet network.": "æˆ–č€…é…įŊŽäŊ įš„ Tor 䞆成į‚ē ZeroNet įš„æ­Ŗåŧæˆå“Ąã€‚", - - "Select account you want to use in this site:": "選擇äŊ čĻåœ¨é€™å€‹įļ˛įĢ™äŊŋį”¨įš„å¸ŗæˆļīŧš", - "currently selected": "į•ļ前選擇", - "Unique to site": "įļ˛įĢ™į¨æœ‰čēĢäģŊ", - - "Content signing failed": "內厚į°ŊįŊ˛å¤ąæ•—", - "Content publish queued for {0:.0f} seconds.": "å…§åŽšåˇ˛åŠ å…Ĩ {0:.0f} į§’åžŒįš„į™ŧäŊˆéšŠåˆ—。", - "Content published to {0}/{1} peers.": "å…§åŽšåˇ˛į™ŧäŊˆåˆ° {0}/{1} 個ᝀéģžã€‚", - "Content published to {0} peers.": "å…§åŽšåˇ˛į™ŧäŊˆåˆ° {0} 個ᝀéģžã€‚", - "No peers found, but your content is ready to access.": "æ‰žä¸åˆ°į¯€éģžīŧŒäŊ†æ˜¯äŊ įš„å…§åŽšåˇ˛įē–å‚™åĨŊčĸĢč¨Ē問。", - "Your network connection is restricted. 
Please, open {0} port": "äŊ įš„įļ˛čˇ¯é€ŖæŽĨ受限åˆļ。čĢ‹åœ¨äŊ įš„čˇ¯į”ąå™¨ä¸Šæ‰“é–‹ {0} 埠", - "on your router to make your site accessible for everyone.": "įĸēäŋäŊ įš„įļ˛įĢ™čƒŊčĸĢæ¯ä¸€å€‹äēēč¨Ē問。", - "Content publish failed.": "內厚į™ŧäŊˆå¤ąæ•—。", - "This file still in sync, if you write it now, then the previous content may be lost.": "這個æĒ”äģį„ļ在同æ­Ĩ中īŧŒåĻ‚æžœäŊ įžåœ¨å¯Ģå…Ĩ厃īŧŒäš‹å‰įš„內厚可čƒŊ會čĸĢä¸Ÿå¤ąã€‚", - "Write content anyway": "åŧˇåˆļå¯Ģå…Ĩ內厚", - "New certificate added:": "æ–°č­‰æ›¸īŧš", - "You current certificate:": "äŊ į•ļå‰įš„č­‰æ›¸īŧš", - "Change it to {auth_type}/{auth_user_name}@{domain}": "æ”ščŽŠč‡ŗ {auth_type}/{auth_user_name}@{domain}-ra", - "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "č­‰æ›¸æ›´æ”šč‡ŗīŧš{auth_type}/{auth_user_name}@{domain}。", - "Site cloned": "įļ˛įĢ™åˇ˛å…‹éš†", - - "You have successfully changed the web interface's language!": "äŊ åˇ˛įˆåŠŸæ”ščފäē† Web į•Œéĸįš„čĒžč¨€īŧ", - "Due to the browser's caching, the full transformation could take some minute.": "į”ąæ–ŧäŊ įš„į€čĻŊå™¨įˇŠå­˜īŧŒåŽŒæ•´įš„įŋģč­¯å¯čƒŊ需čĻčŠąåšžåˆ†é˜ã€‚", - - "Connection with UiServer Websocket was lost. 
Reconnecting...": "UiServer Websocket įš„é€Ŗįˇšåˇ˛ä¸Ÿå¤ąã€‚é‡æ–°é€Ŗįˇšä¸­...", - "Connection with UiServer Websocket recovered.": "UiServer Websocket įš„é€Ŗįˇšåˇ˛æĸ垊。", - "UiServer Websocket error, please reload the page.": "UiServer Websocket 錯čǤīŧŒčĢ‹é‡æ–°čŧ‰å…Ĩ頁éĸ。", - "   Connecting...": "   é€Ŗįˇšä¸­...", - "Site size: ": "įļ˛įĢ™å¤§å°īŧš", - "MB is larger than default allowed ": "MB æ¯”é č¨­å…č¨ąįš„å€ŧ更大 ", - "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "打開įļ˛įĢ™ä¸Ļč¨­åŽšå¤§å°é™åˆļ到 \" + site_info.next_size_limit + \"MB", - " files needs to be downloaded": " 個æĒ”需čρ䏋čŧ‰", - " downloaded": " 厞䏋čŧ‰", - " download failed": " 下čŧ‰å¤ąæ•—", - "Peers found: ": "åˇ˛æ‰žåˆ°į¯€éģžīŧš", - "No peers found": "æ‰žä¸åˆ°į¯€éģž", - "Running out of size limit (": "čļ…å‡ē大小限åˆļ", - "Set limit to \" + site_info.next_size_limit + \"MB": "č¨­åŽšé™åˆļ到 \" + site_info.next_size_limit + \"MB", - "Cloning site...": "複čŖŊįļ˛į̙䏭...", - "Site cloned": "įļ˛į̙厞複čŖŊ", - "Site size limit changed to {0}MB": "įļ˛įĢ™å¤§å°é™åˆļåˇ˛æ”ščŽŠåˆ° {0}MB", - " New version of this page has just released.
    Reload to see the modified content.": " æœŦ頁éĸįš„æ–°į‰ˆæœŦ厞įļ“į™ŧäŊˆã€‚
    重新čŧ‰å…Ĩ來æŸĨįœ‹æ›´æ”šåžŒįš„å…§åŽšã€‚", - "This site requests permission:": "這個įļ˛įĢ™įš„čĢ‹æą‚č¨ąå¯æŦŠīŧš", - "_(Accept)": "授æŦŠ" - -} diff --git a/src/Translate/languages/zh.json b/src/Translate/languages/zh.json deleted file mode 100644 index 16a40b1a..00000000 --- a/src/Translate/languages/zh.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "įĨč´ēīŧŒæ‚¨įš„įĢ¯åŖ ({0}) 厞įģæ‰“åŧ€ã€‚
    æ‚¨åˇ˛įģæ˜¯ ZeroNet įŊ‘įģœįš„æ­Ŗåŧæˆå‘˜äē†īŧ", - "Tor mode active, every connection using Onion route.": "Tor æ¨Ąåŧå¯į”¨īŧŒæ¯ä¸ĒčŋžæŽĨæ­Ŗåœ¨äŊŋį”¨æ´‹č‘ąčˇ¯į”ąã€‚", - "Successfully started Tor onion hidden services.": "成功启动 Tor æ´‹č‘ąéšč—æœåŠĄã€‚", - "Unable to start hidden services, please check your config.": "æ— æŗ•æ‰“åŧ€éšč—æœåŠĄīŧŒč¯ˇæŖ€æŸĨæ‚¨įš„é…įŊŽã€‚", - "For faster connections open {0} port on your router.": "ä¸ēä熿›´åŋĢįš„čŋžæŽĨč¯ˇåœ¨čˇ¯į”ąå™¨ä¸Šæ‰“åŧ€ {0} įĢ¯åŖã€‚", - "Your connection is restricted. Please, open {0} port on your router": "æ‚¨įš„čŋžæŽĨ受限åˆļã€‚č¯ˇåœ¨æ‚¨įš„čˇ¯į”ąå™¨ä¸Šæ‰“åŧ€ {0} įĢ¯åŖ", - "or configure Tor to become a full member of the ZeroNet network.": "æˆ–č€…é…įŊŽæ‚¨įš„ Tor æĨ成ä¸ē ZeroNet įš„æ­Ŗåŧæˆå‘˜ã€‚", - - "Select account you want to use in this site:": "选拊您čρ圍čŋ™ä¸ĒįŊ‘įĢ™äŊŋį”¨įš„å¸æˆˇ:", - "No certificate": "æ˛Ąæœ‰č¯äšĻ", - "currently selected": "åŊ“前选拊", - "Unique to site": "įŊ‘įĢ™į‹Ŧ有čēĢäģŊ", - - "Content signing failed": "å†…åŽšį­žåå¤ąč´Ĩ", - "Content publish queued for {0:.0f} seconds.": "å†…åŽšåˇ˛åŠ å…Ĩ {0:.0f} į§’åŽįš„å‘å¸ƒé˜Ÿåˆ—ã€‚", - "Content published to {0}/{1} peers.": "å†…åŽšåˇ˛å‘å¸ƒåˆ° {0}/{1} ä¸ĒčŠ‚į‚šã€‚", - "Content published to {0} peers.": "å†…åŽšåˇ˛å‘å¸ƒåˆ° {0} ä¸ĒčŠ‚į‚šã€‚", - "No peers found, but your content is ready to access.": "æ‰žä¸åˆ°čŠ‚į‚šīŧŒäŊ†æ˜¯æ‚¨įš„å†…åŽšåˇ˛įģå‡†å¤‡åĨŊčĸĢčŽŋ闎。", - "Your network connection is restricted. 
Please, open {0} port": "æ‚¨įš„įŊ‘įģœčŋžæŽĨ受限åˆļã€‚č¯ˇåœ¨æ‚¨įš„čˇ¯į”ąå™¨ä¸Šæ‰“åŧ€ {0} įĢ¯åŖ", - "on your router to make your site accessible for everyone.": "įĄŽäŋæ‚¨įš„įĢ™į‚ščƒŊčĸĢæ¯ä¸€ä¸ĒäēēčŽŋ闎。", - "Content publish failed.": "å†…åŽšå‘å¸ƒå¤ąč´Ĩ。", - "This file still in sync, if you write it now, then the previous content may be lost.": "čŋ™ä¸Ē文äģļäģį„ļ在同æ­Ĩ中īŧŒåĻ‚æžœæ‚¨įŽ°åœ¨å†™å…Ĩ厃īŧŒäš‹å‰įš„内厚可čƒŊäŧščĸĢä¸ĸå¤ąã€‚", - "Write content anyway": "åŧēåˆļ写å…Ĩ内厚", - "New certificate added:": "æ–°č¯äšĻīŧš", - "You current certificate:": "您åŊ“å‰įš„č¯äšĻīŧš", - "Change it to {auth_type}/{auth_user_name}@{domain}": "æ›´æ”šč‡ŗ {auth_type}/{auth_user_name}@{domain}-ra", - "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "蝁äšĻæ›´æ”šč‡ŗīŧš{auth_type}/{auth_user_name}@{domain}。", - "Site cloned": "įĢ™į‚šåˇ˛å…‹éš†", - - "You have successfully changed the web interface's language!": "æ‚¨åˇ˛įģæˆåŠŸæ›´æ”šäē† web į•Œéĸįš„č¯­č¨€īŧ", - "Due to the browser's caching, the full transformation could take some minute.": "į”ąäēŽæ‚¨įš„æĩč§ˆå™¨įŧ“å­˜īŧŒåŽŒæ•´įš„įŋģč¯‘å¯čƒŊ需čĻčŠąå‡ åˆ†é’Ÿã€‚", - - "Connection with UiServer Websocket was lost. 
Reconnecting...": "UiServer Websocket įš„čŋžæŽĨ厞ä¸ĸå¤ąã€‚é‡æ–°čŋžæŽĨ中...", - "Connection with UiServer Websocket recovered.": "UiServer Websocket įš„čŋžæŽĨ厞æĸ复。", - "UiServer Websocket error, please reload the page.": "UiServer Websocket 错蝝īŧŒč¯ˇé‡æ–°åŠ čŊŊéĄĩéĸ。", - "   Connecting...": "   čŋžæŽĨ中...", - "Site size: ": "įĢ™į‚šå¤§å°īŧš", - "MB is larger than default allowed ": "MB 比éģ˜čŽ¤å…čŽ¸įš„å€ŧ更大 ", - "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "打åŧ€įĢ™į‚šåšļ莞įŊŽå¤§å°é™åˆļ到 \" + site_info.next_size_limit + \"MB", - " files needs to be downloaded": " ä¸Ē文äģļ需čρ䏋čŊŊ", - " downloaded": " 厞䏋čŊŊ", - " download failed": " 下čŊŊå¤ąč´Ĩ", - "Peers found: ": "åˇ˛æ‰žåˆ°čŠ‚į‚šīŧš", - "No peers found": "æ‰žä¸åˆ°čŠ‚į‚š", - "Running out of size limit (": "čļ…å‡ē大小限åˆļ", - "Set limit to \" + site_info.next_size_limit + \"MB": "莞įŊŽé™åˆļ到 \" + site_info.next_size_limit + \"MB", - "Cloning site...": "克隆įĢ™į‚šä¸­...", - "Site cloned": "įĢ™į‚šåˇ˛å…‹éš†", - "Site size limit changed to {0}MB": "įĢ™į‚šå¤§å°é™åˆļåˇ˛æ›´æ”šåˆ° {0}MB", - " New version of this page has just released.
    Reload to see the modified content.": " æœŦéĄĩéĸįš„æ–°į‰ˆæœŦ厞įģå‘布。
    重新加čŊŊæĨæŸĨįœ‹æ›´æ”šåŽįš„å†…åŽšã€‚", - "This site requests permission:": "čŋ™ä¸ĒįĢ™į‚šįš„č¯ˇæą‚æƒé™īŧš", - "_(Accept)": "授权" - -} diff --git a/src/Ui/UiRequest.py b/src/Ui/UiRequest.py index 4a4e0545..674de84d 100644 --- a/src/Ui/UiRequest.py +++ b/src/Ui/UiRequest.py @@ -3,11 +3,7 @@ import re import os import mimetypes import json -import html -import urllib -import socket - -import gevent +import cgi from Config import config from Site import SiteManager @@ -15,7 +11,6 @@ from User import UserManager from Plugin import PluginManager from Ui.UiWebsocket import UiWebsocket from Crypt import CryptHash -from util import helper status_texts = { 200: "200 OK", @@ -26,27 +21,6 @@ status_texts = { 500: "500 Internal Server Error", } -content_types = { - "asc": "application/pgp-keys", - "css": "text/css", - "gpg": "application/pgp-encrypted", - "html": "text/html", - "js": "application/javascript", - "json": "application/json", - "oga": "audio/ogg", - "ogg": "application/ogg", - "ogv": "video/ogg", - "sig": "application/pgp-signature", - "txt": "text/plain", - "webmanifest": "application/manifest+json", - "wasm": "application/wasm", - "webp": "image/webp" -} - - -class SecurityError(Exception): - pass - @PluginManager.acceptPlugins class UiRequest(object): @@ -65,99 +39,23 @@ class UiRequest(object): self.start_response = start_response # Start response function self.user = None - self.script_nonce = None # Nonce for script tags in wrapper html - - def learnHost(self, host): - self.server.allowed_hosts.add(host) - self.server.log.info("Added %s as allowed host" % host) - - def isHostAllowed(self, host): - if host in self.server.allowed_hosts: - return True - - # Allow any IP address as they are not affected by DNS rebinding - # attacks - if helper.isIp(host): - self.learnHost(host) - return True - - if ":" in host and helper.isIp(host.rsplit(":", 1)[0]): # Test without port - self.learnHost(host) - return True - - if self.isProxyRequest(): # Support for chrome 
extension proxy - if self.isDomain(host): - return True - else: - return False - - return False - - def isDomain(self, address): - return self.server.site_manager.isDomainCached(address) - - def resolveDomain(self, domain): - return self.server.site_manager.resolveDomainCached(domain) # Call the request handler function base on path def route(self, path): - # Restict Ui access by ip - if config.ui_restrict and self.env['REMOTE_ADDR'] not in config.ui_restrict: + if config.ui_restrict and self.env['REMOTE_ADDR'] not in config.ui_restrict: # Restict Ui access by ip return self.error403(details=False) - # Check if host allowed to do request - if not self.isHostAllowed(self.env.get("HTTP_HOST")): - ret_error = next(self.error403("Invalid host: %s" % self.env.get("HTTP_HOST"), details=False)) - - http_get = self.env["PATH_INFO"] - if self.env["QUERY_STRING"]: - http_get += "?{0}".format(self.env["QUERY_STRING"]) - self_host = self.env["HTTP_HOST"].split(":")[0] - self_ip = self.env["HTTP_HOST"].replace(self_host, socket.gethostbyname(self_host)) - link = "http://{0}{1}".format(self_ip, http_get) - ret_body = """ -

    Start the client with --ui_host "{host}" argument

    -

    or access via ip: {link}

    - """.format( - host=html.escape(self.env["HTTP_HOST"]), - link=html.escape(link) - ).encode("utf8") - return iter([ret_error, ret_body]) - - # Prepend .bit host for transparent proxy - if self.isDomain(self.env.get("HTTP_HOST")): - path = re.sub("^/", "/" + self.env.get("HTTP_HOST") + "/", path) path = re.sub("^http://zero[/]+", "/", path) # Remove begining http://zero/ for chrome extension path = re.sub("^http://", "/", path) # Remove begining http for chrome extension .bit access - # Sanitize request url - path = path.replace("\\", "/") - if "../" in path or "./" in path: - return self.error403("Invalid path: %s" % path) - if self.env["REQUEST_METHOD"] == "OPTIONS": - if "/" not in path.strip("/"): - content_type = self.getContentType("index.html") - else: - content_type = self.getContentType(path) - - extra_headers = {"Access-Control-Allow-Origin": "null"} - - self.sendHeader(content_type=content_type, extra_headers=extra_headers, noscript=True) + self.sendHeader() return "" if path == "/": return self.actionIndex() - elif path in ("/favicon.ico", "/apple-touch-icon.png"): - return self.actionFile("src/Ui/media/img/%s" % path) - # Internal functions - elif "/ZeroNet-Internal/" in path: - path = re.sub(".*?/ZeroNet-Internal/", "/", path) - func = getattr(self, "action" + path.strip("/"), None) # Check if we have action+request_path function - if func: - return func() - else: - return self.error404(path) + elif path.endswith("favicon.ico"): + return self.actionFile("src/Ui/media/img/favicon.ico") # Media elif path.startswith("/uimedia/"): return self.actionUiMedia(path) @@ -165,6 +63,8 @@ class UiRequest(object): # uimedia within site dir (for chrome extension) path = re.sub(".*?/uimedia/", "/uimedia/", path) return self.actionUiMedia(path) + elif path.startswith("/media"): + return self.actionSiteMedia(path) # Websocket elif path == "/Websocket": return self.actionWebsocket() @@ -173,64 +73,42 @@ class UiRequest(object): return self.actionDebug() elif path 
== "/Console" and config.debug: return self.actionConsole() - # Wrapper-less static files - elif path.startswith("/raw/"): - return self.actionSiteMedia(path.replace("/raw", "/media", 1), header_noscript=True) - - elif path.startswith("/add/"): - return self.actionSiteAdd() # Site media wrapper else: if self.get.get("wrapper_nonce"): - if self.get["wrapper_nonce"] in self.server.wrapper_nonces: - self.server.wrapper_nonces.remove(self.get["wrapper_nonce"]) - return self.actionSiteMedia("/media" + path) # Only serve html files with frame - else: - self.server.log.warning("Invalid wrapper nonce: %s" % self.get["wrapper_nonce"]) - body = self.actionWrapper(path) + return self.actionSiteMedia("/media" + path) # Only serve html files with frame else: body = self.actionWrapper(path) if body: return body else: - func = getattr(self, "action" + path.strip("/"), None) # Check if we have action+request_path function + func = getattr(self, "action" + path.lstrip("/"), None) # Check if we have action+request_path function if func: return func() else: - ret = self.error404(path) - return ret + return self.error404(path) - # The request is proxied by chrome extension or a transparent proxy + # The request is proxied by chrome extension def isProxyRequest(self): - return self.env["PATH_INFO"].startswith("http://") or (self.server.allow_trans_proxy and self.isDomain(self.env.get("HTTP_HOST"))) - - def isWebSocketRequest(self): - return self.env.get("HTTP_UPGRADE") == "websocket" + return self.env["PATH_INFO"].startswith("http://") def isAjaxRequest(self): return self.env.get("HTTP_X_REQUESTED_WITH") == "XMLHttpRequest" # Get mime by filename def getContentType(self, file_name): - file_name = file_name.lower() - ext = file_name.rsplit(".", 1)[-1] - - if ext in content_types: - content_type = content_types[ext] - elif ext in ("ttf", "woff", "otf", "woff2", "eot", "sfnt", "collection"): - content_type = "font/%s" % ext - else: - content_type = mimetypes.guess_type(file_name)[0] - + 
content_type = mimetypes.guess_type(file_name)[0] if not content_type: - content_type = "application/octet-stream" - - return content_type.lower() + if file_name.endswith("json"): # Correct json header + content_type = "application/json" + else: + content_type = "application/octet-stream" + return content_type # Return: Posted variables def getPosted(self): if self.env['REQUEST_METHOD'] == "POST": - return dict(urllib.parse.parse_qsl( + return dict(cgi.parse_qsl( self.env['wsgi.input'].readline().decode() )) else: @@ -240,7 +118,7 @@ class UiRequest(object): def getCookies(self): raw_cookies = self.env.get('HTTP_COOKIE') if raw_cookies: - cookies = urllib.parse.parse_qsl(raw_cookies) + cookies = cgi.parse_qsl(raw_cookies) return {key.strip(): val for key, val in cookies} else: return {} @@ -253,339 +131,147 @@ class UiRequest(object): self.user = UserManager.user_manager.create() return self.user - def getRequestUrl(self): - if self.isProxyRequest(): - if self.env["PATH_INFO"].startswith("http://zero/"): - return self.env["PATH_INFO"] - else: # Add http://zero to direct domain access - return self.env["PATH_INFO"].replace("http://", "http://zero/", 1) - else: - return self.env["wsgi.url_scheme"] + "://" + self.env["HTTP_HOST"] + self.env["PATH_INFO"] - - def getReferer(self): - referer = self.env.get("HTTP_REFERER") - if referer and self.isProxyRequest() and not referer.startswith("http://zero/"): - return referer.replace("http://", "http://zero/", 1) - else: - return referer - - def isScriptNonceSupported(self): - user_agent = self.env.get("HTTP_USER_AGENT") - if "Edge/" in user_agent: - is_script_nonce_supported = False - elif "Safari/" in user_agent and "Chrome/" not in user_agent: - is_script_nonce_supported = False - else: - is_script_nonce_supported = True - return is_script_nonce_supported - # Send response headers - def sendHeader(self, status=200, content_type="text/html", noscript=False, allow_ajax=False, script_nonce=None, extra_headers=[]): - headers = 
{} - headers["Version"] = "HTTP/1.1" - headers["Connection"] = "Keep-Alive" - headers["Keep-Alive"] = "max=25, timeout=30" - headers["X-Frame-Options"] = "SAMEORIGIN" - if content_type != "text/html" and self.env.get("HTTP_REFERER") and self.isSameOrigin(self.getReferer(), self.getRequestUrl()): - headers["Access-Control-Allow-Origin"] = "*" # Allow load font files from css - - if noscript: - headers["Content-Security-Policy"] = "default-src 'none'; sandbox allow-top-navigation allow-forms; img-src *; font-src * data:; media-src *; style-src * 'unsafe-inline';" - elif script_nonce and self.isScriptNonceSupported(): - headers["Content-Security-Policy"] = "default-src 'none'; script-src 'nonce-{0}'; img-src 'self' blob: data:; style-src 'self' blob: 'unsafe-inline'; connect-src *; frame-src 'self' blob:".format(script_nonce) - - if allow_ajax: - headers["Access-Control-Allow-Origin"] = "null" - + def sendHeader(self, status=200, content_type="text/html", extra_headers=[]): + if content_type == "text/html": + content_type = "text/html; charset=utf-8" + headers = [] + headers.append(("Version", "HTTP/1.1")) + headers.append(("Connection", "Keep-Alive")) + headers.append(("Keep-Alive", "max=25, timeout=30")) + headers.append(("Access-Control-Allow-Origin", "*")) # Allow json access if self.env["REQUEST_METHOD"] == "OPTIONS": # Allow json access - headers["Access-Control-Allow-Headers"] = "Origin, X-Requested-With, Content-Type, Accept, Cookie, Range" - headers["Access-Control-Allow-Credentials"] = "true" - - # Download instead of display file types that can be dangerous - if re.findall("/svg|/xml|/x-shockwave-flash|/pdf", content_type): - headers["Content-Disposition"] = "attachment" + headers.append(("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept, Cookie")) + headers.append(("Access-Control-Allow-Credentials", "true")) cacheable_type = ( - self.env["REQUEST_METHOD"] == "OPTIONS" or - content_type.split("/", 1)[0] in ("image", "video", 
"font") or - content_type in ("application/javascript", "text/css") + content_type == "text/css" or content_type.startswith("image") or content_type.startswith("video") or + self.env["REQUEST_METHOD"] == "OPTIONS" or content_type == "application/javascript" ) - if content_type in ("text/plain", "text/html", "text/css", "application/javascript", "application/json", "application/manifest+json"): - content_type += "; charset=utf-8" - if status in (200, 206) and cacheable_type: # Cache Css, Js, Image files for 10min - headers["Cache-Control"] = "public, max-age=600" # Cache 10 min + headers.append(("Cache-Control", "public, max-age=600")) # Cache 10 min else: - headers["Cache-Control"] = "no-cache, no-store, private, must-revalidate, max-age=0" # No caching at all - headers["Content-Type"] = content_type - headers.update(extra_headers) - return self.start_response(status_texts[status], list(headers.items())) + headers.append(("Cache-Control", "no-cache, no-store, private, must-revalidate, max-age=0")) # No caching at all + headers.append(("Content-Type", content_type)) + for extra_header in extra_headers: + headers.append(extra_header) + return self.start_response(status_texts[status], headers) # Renders a template def render(self, template_path, *args, **kwargs): - template = open(template_path, encoding="utf8").read() - - def renderReplacer(m): - if m.group(1) in kwargs: - return "%s" % kwargs.get(m.group(1), "") - else: - return m.group(0) - - template_rendered = re.sub("{(.*?)}", renderReplacer, template) - - return template_rendered.encode("utf8") - - def isWrapperNecessary(self, path): - match = re.match(r"/(?P
    [A-Za-z0-9\._-]+)(?P/.*|$)", path) - - if not match: - return True - - inner_path = match.group("inner_path").lstrip("/") - if not inner_path or path.endswith("/"): # It's a directory - content_type = self.getContentType("index.html") - else: # It's a file - content_type = self.getContentType(inner_path) - - is_html_file = "html" in content_type or "xhtml" in content_type - - return is_html_file - - @helper.encodeResponse - def formatRedirect(self, url): - return """ - - - Redirecting to {0} - - - - """.format(html.escape(url)) + template = open(template_path).read().decode("utf8") + return template.format(**kwargs).encode("utf8") # - Actions - # Redirect to an url def actionRedirect(self, url): - self.start_response('301 Redirect', [('Location', str(url))]) - yield self.formatRedirect(url) + self.start_response('301 Redirect', [('Location', url)]) + yield "Location changed: %s" % url def actionIndex(self): - return self.actionRedirect("/" + config.homepage + "/") + return self.actionRedirect("/" + config.homepage) # Render a file from media with iframe site wrapper def actionWrapper(self, path, extra_headers=None): if not extra_headers: - extra_headers = {} - script_nonce = self.getScriptNonce() + extra_headers = [] - match = re.match(r"/(?P
    [A-Za-z0-9\._-]+)(?P/.*|$)", path) - just_added = False + match = re.match("/(?P
    [A-Za-z0-9\._-]+)(?P/.*|$)", path) if match: address = match.group("address") inner_path = match.group("inner_path").lstrip("/") - - if not self.isWrapperNecessary(path): - return self.actionSiteMedia("/media" + path) # Serve non-html files without wrapper - - if self.isAjaxRequest(): + if "." in inner_path and not inner_path.endswith(".html"): + return self.actionSiteMedia("/media" + path) # Only serve html files with frame + if self.env.get("HTTP_X_REQUESTED_WITH"): return self.error403("Ajax request not allowed to load wrapper") # No ajax allowed on wrapper - if self.isWebSocketRequest(): - return self.error403("WebSocket request not allowed to load wrapper") # No websocket - - if "text/html" not in self.env.get("HTTP_ACCEPT", ""): - return self.error403("Invalid Accept header to load wrapper: %s" % self.env.get("HTTP_ACCEPT", "")) - if "prefetch" in self.env.get("HTTP_X_MOZ", "") or "prefetch" in self.env.get("HTTP_PURPOSE", ""): - return self.error403("Prefetch not allowed to load wrapper") - site = SiteManager.site_manager.get(address) - if site and site.content_manager.contents.get("content.json"): + if ( + site and site.content_manager.contents.get("content.json") and + (not site.getReachableBadFiles() or site.settings["own"]) + ): # Its downloaded or own title = site.content_manager.contents["content.json"]["title"] else: title = "Loading %s..." 
% address - site = SiteManager.site_manager.get(address) - if site: # Already added, but not downloaded - if time.time() - site.announcer.time_last_announce > 5: - site.log.debug("Reannouncing site...") - gevent.spawn(site.update, announce=True) - else: # If not added yet - site = SiteManager.site_manager.need(address) - just_added = True + site = SiteManager.site_manager.need(address) # Start download site if not site: return False - self.sendHeader(extra_headers=extra_headers, script_nonce=script_nonce) - - min_last_announce = (time.time() - site.announcer.time_last_announce) / 60 - if min_last_announce > 60 and site.isServing() and not just_added: - site.log.debug("Site requested, but not announced recently (last %.0fmin ago). Updating..." % min_last_announce) - gevent.spawn(site.update, announce=True) - - return iter([self.renderWrapper(site, path, inner_path, title, extra_headers, script_nonce=script_nonce)]) - # Make response be sent at once (see https://github.com/HelloZeroNet/ZeroNet/issues/1092) + self.sendHeader(extra_headers=extra_headers[:]) + return iter([self.renderWrapper(site, path, inner_path, title, extra_headers)]) + # Dont know why wrapping with iter necessary, but without it around 100x slower else: # Bad url return False - def getSiteUrl(self, address): - if self.isProxyRequest(): - return "http://zero/" + address - else: - return "/" + address - - def getWsServerUrl(self): - if self.isProxyRequest(): - if self.env["REMOTE_ADDR"] == "127.0.0.1": # Local client, the server address also should be 127.0.0.1 - server_url = "http://127.0.0.1:%s" % self.env["SERVER_PORT"] - else: # Remote client, use SERVER_NAME as server's real address - server_url = "http://%s:%s" % (self.env["SERVER_NAME"], self.env["SERVER_PORT"]) - else: - server_url = "" - return server_url - - def processQueryString(self, site, query_string): - match = re.search("zeronet_peers=(.*?)(&|$)", query_string) - if match: - query_string = query_string.replace(match.group(0), "") - 
num_added = 0 - for peer in match.group(1).split(","): - if not re.match(".*?:[0-9]+$", peer): - continue - ip, port = peer.rsplit(":", 1) - if site.addPeer(ip, int(port), source="query_string"): - num_added += 1 - site.log.debug("%s peers added by query string" % num_added) - - return query_string - - def renderWrapper(self, site, path, inner_path, title, extra_headers, show_loadingscreen=None, script_nonce=None): + def renderWrapper(self, site, path, inner_path, title, extra_headers): file_inner_path = inner_path if not file_inner_path: file_inner_path = "index.html" # If inner path defaults to index.html - if file_inner_path.endswith("/"): - file_inner_path = file_inner_path + "index.html" - address = re.sub("/.*", "", path.lstrip("/")) if self.isProxyRequest() and (not path or "/" in path[1:]): - if self.env["HTTP_HOST"] == "zero": - root_url = "/" + address + "/" - file_url = "/" + address + "/" + inner_path - else: - file_url = "/" + inner_path - root_url = "/" - + file_url = re.sub(".*/", "", inner_path) else: file_url = "/" + address + "/" + inner_path - root_url = "/" + address + "/" - - if self.isProxyRequest(): - self.server.allowed_ws_origins.add(self.env["HTTP_HOST"]) # Wrapper variable inits + query_string = "" body_style = "" meta_tags = "" - postmessage_nonce_security = "false" wrapper_nonce = self.getWrapperNonce() - inner_query_string = self.processQueryString(site, self.env.get("QUERY_STRING", "")) - if "?" in inner_path: - sep = "&" + if self.env.get("QUERY_STRING"): + query_string = "?%s&wrapper_nonce=%s" % (self.env["QUERY_STRING"], wrapper_nonce) else: - sep = "?" 
- - if inner_query_string: - inner_query_string = "%s%s&wrapper_nonce=%s" % (sep, inner_query_string, wrapper_nonce) - else: - inner_query_string = "%swrapper_nonce=%s" % (sep, wrapper_nonce) + query_string = "?wrapper_nonce=%s" % wrapper_nonce if self.isProxyRequest(): # Its a remote proxy request + if self.env["REMOTE_ADDR"] == "127.0.0.1": # Local client, the server address also should be 127.0.0.1 + server_url = "http://127.0.0.1:%s" % self.env["SERVER_PORT"] + else: # Remote client, use SERVER_NAME as server's real address + server_url = "http://%s:%s" % (self.env["SERVER_NAME"], self.env["SERVER_PORT"]) homepage = "http://zero/" + config.homepage else: # Use relative path + server_url = "" homepage = "/" + config.homepage - server_url = self.getWsServerUrl() # Real server url for WS connections - - user = self.getCurrentUser() - if user: - theme = user.settings.get("theme", "light") - else: - theme = "light" - - themeclass = "theme-%-6s" % re.sub("[^a-z]", "", theme) - if site.content_manager.contents.get("content.json"): # Got content.json content = site.content_manager.contents["content.json"] if content.get("background-color"): - background_color = content.get("background-color-%s" % theme, content["background-color"]) - body_style += "background-color: %s;" % html.escape(background_color) + body_style += "background-color: %s;" % \ + cgi.escape(site.content_manager.contents["content.json"]["background-color"], True) if content.get("viewport"): - meta_tags += '' % html.escape(content["viewport"]) - if content.get("favicon"): - meta_tags += '' % (root_url, html.escape(content["favicon"])) - if content.get("postmessage_nonce_security"): - postmessage_nonce_security = "true" + meta_tags += '' % cgi.escape(content["viewport"], True) - sandbox_permissions = "" - - if "NOSANDBOX" in site.settings["permissions"]: - sandbox_permissions += " allow-same-origin" - - if show_loadingscreen is None: - show_loadingscreen = not site.storage.isFile(file_inner_path) - - if 
show_loadingscreen: - meta_tags += ''; - - def xescape(s): - '''combines parts from re.escape & html.escape''' - # https://github.com/python/cpython/blob/3.10/Lib/re.py#L267 - # '&' is handled otherwise - re_chars = {i: '\\' + chr(i) for i in b'()[]{}*+-|^$\\.~# \t\n\r\v\f'} - # https://github.com/python/cpython/blob/3.10/Lib/html/__init__.py#L12 - html_chars = { - '<' : '<', - '>' : '>', - '"' : '"', - "'" : ''', - } - # we can't replace '&' because it makes certain zites work incorrectly - # it should however in no way interfere with re.sub in render - repl = {} - repl.update(re_chars) - repl.update(html_chars) - return s.translate(repl) + if site.settings.get("own"): + sandbox_permissions = "allow-modals" # For coffeescript compile errors + else: + sandbox_permissions = "" return self.render( "src/Ui/template/wrapper.html", server_url=server_url, inner_path=inner_path, - file_url=xescape(file_url), - file_inner_path=xescape(file_inner_path), + file_url=file_url, + file_inner_path=file_inner_path, address=site.address, - title=xescape(title), + title=title, body_style=body_style, meta_tags=meta_tags, - query_string=xescape(inner_query_string), + query_string=query_string, wrapper_key=site.settings["wrapper_key"], - ajax_key=site.settings["ajax_key"], - wrapper_nonce=wrapper_nonce, - postmessage_nonce_security=postmessage_nonce_security, permissions=json.dumps(site.settings["permissions"]), - show_loadingscreen=json.dumps(show_loadingscreen), + show_loadingscreen=json.dumps(not site.storage.isFile(file_inner_path)), sandbox_permissions=sandbox_permissions, rev=config.rev, - lang=config.language, - homepage=homepage, - themeclass=themeclass, - script_nonce=script_nonce + homepage=homepage ) # Create a new wrapper nonce that allows to get one html file without the wrapper @@ -594,313 +280,178 @@ class UiRequest(object): self.server.wrapper_nonces.append(wrapper_nonce) return wrapper_nonce - def getScriptNonce(self): - if not self.script_nonce: - self.script_nonce = 
CryptHash.random(encoding="base64") - - return self.script_nonce - - # Create a new wrapper nonce that allows to get one site - def getAddNonce(self): - add_nonce = CryptHash.random() - self.server.add_nonces.append(add_nonce) - return add_nonce - - def isSameOrigin(self, url_a, url_b): - if not url_a or not url_b: + # Returns if media request allowed from that referer + def isMediaRequestAllowed(self, site_address, referer): + if not re.sub("^http[s]{0,1}://", "", referer).startswith(self.env["HTTP_HOST"]): return False + referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address + return referer_path.startswith("/" + site_address) - url_a = url_a.replace("/raw/", "/") - url_b = url_b.replace("/raw/", "/") - - origin_pattern = "http[s]{0,1}://(.*?/.*?/).*" - is_origin_full = re.match(origin_pattern, url_a) - if not is_origin_full: # Origin looks trimmed to host, require only same host - origin_pattern = "http[s]{0,1}://(.*?/).*" - - origin_a = re.sub(origin_pattern, "\\1", url_a) - origin_b = re.sub(origin_pattern, "\\1", url_b) - - return origin_a == origin_b - - # Return {address: 1Site.., inner_path: /data/users.json} from url path - def parsePath(self, path): - path = path.replace("\\", "/") + # Serve a media for site + def actionSiteMedia(self, path): path = path.replace("/index.html/", "/") # Base Backward compatibility fix if path.endswith("/"): path = path + "index.html" - if "../" in path or "./" in path: - raise SecurityError("Invalid path") + match = re.match("/media/(?P
    [A-Za-z0-9\._-]+)/(?P.*)", path) - match = re.match(r"/media/(?P
    [A-Za-z0-9]+[A-Za-z0-9\._-]+)(?P/.*|$)", path) - if match: - path_parts = match.groupdict() - if self.isDomain(path_parts["address"]): - path_parts["address"] = self.resolveDomain(path_parts["address"]) - path_parts["request_address"] = path_parts["address"] # Original request address (for Merger sites) - path_parts["inner_path"] = path_parts["inner_path"].lstrip("/") - if not path_parts["inner_path"]: - path_parts["inner_path"] = "index.html" - return path_parts - else: - return None + # Check wrapper nonce + content_type = self.getContentType(path) + if "htm" in content_type: # Valid nonce must present to render html files + wrapper_nonce = self.get.get("wrapper_nonce") + if wrapper_nonce not in self.server.wrapper_nonces: + return self.error403("Wrapper nonce error. Please reload the page.") + self.server.wrapper_nonces.remove(self.get["wrapper_nonce"]) - # Serve a media for site - def actionSiteMedia(self, path, header_length=True, header_noscript=False): - try: - path_parts = self.parsePath(path) - except SecurityError as err: - return self.error403(err) + referer = self.env.get("HTTP_REFERER") + if referer and match: # Only allow same site to receive media + if not self.isMediaRequestAllowed(match.group("address"), referer): + return self.error403("Media referrer error") # Referrer not starts same address as requested path - if not path_parts: + if match: # Looks like a valid path + address = match.group("address") + file_path = "%s/%s/%s" % (config.data_dir, address, match.group("inner_path")) + allowed_dir = os.path.abspath("%s/%s" % (config.data_dir, address)) # Only files within data/sitehash allowed + data_dir = os.path.abspath("data") # No files from data/ allowed + if ( + ".." 
in file_path + or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir) + or allowed_dir == data_dir + ): # File not in allowed path + return self.error403() + else: + if config.debug and file_path.split("/")[-1].startswith("all."): + # If debugging merge *.css to all.css and *.js to all.js + site = self.server.sites.get(address) + if site.settings["own"]: + from Debug import DebugMedia + DebugMedia.merge(file_path) + if os.path.isfile(file_path): # File exits + return self.actionFile(file_path) + else: # File not exits, try to download + site = SiteManager.site_manager.need(address, all_file=False) + result = site.needFile(match.group("inner_path"), priority=5) # Wait until file downloads + if result: + return self.actionFile(file_path) + else: + self.log.debug("File not found: %s" % match.group("inner_path")) + # Site larger than allowed, re-add wrapper nonce to allow reload + if site.settings.get("size", 0) > site.getSizeLimit() * 1024 * 1024: + self.server.wrapper_nonces.append(self.get.get("wrapper_nonce")) + return self.error404(match.group("inner_path")) + + else: # Bad url return self.error404(path) - address = path_parts["address"] - - file_path = "%s/%s/%s" % (config.data_dir, address, path_parts["inner_path"]) - - if (config.debug or config.merge_media) and file_path.split("/")[-1].startswith("all."): - # If debugging merge *.css to all.css and *.js to all.js - site = self.server.sites.get(address) - if site and site.settings["own"]: - from Debug import DebugMedia - DebugMedia.merge(file_path) - - if not address or address == ".": - return self.error403(path_parts["inner_path"]) - - header_allow_ajax = False - if self.get.get("ajax_key"): - site = SiteManager.site_manager.get(path_parts["request_address"]) - if self.get["ajax_key"] == site.settings["ajax_key"]: - header_allow_ajax = True - else: - return self.error403("Invalid ajax_key") - - file_size = helper.getFilesize(file_path) - - if file_size is not None: - return 
self.actionFile(file_path, header_length=header_length, header_noscript=header_noscript, header_allow_ajax=header_allow_ajax, file_size=file_size, path_parts=path_parts) - - elif os.path.isdir(file_path): # If this is actually a folder, add "/" and redirect - if path_parts["inner_path"]: - return self.actionRedirect("./%s/" % path_parts["inner_path"].split("/")[-1]) - else: - return self.actionRedirect("./%s/" % path_parts["address"]) - - else: # File not exists, try to download - if address not in SiteManager.site_manager.sites: # Only in case if site already started downloading - return self.actionSiteAddPrompt(path) - - site = SiteManager.site_manager.need(address) - - if path_parts["inner_path"].endswith("favicon.ico"): # Default favicon for all sites - return self.actionFile("src/Ui/media/img/favicon.ico") - - result = site.needFile(path_parts["inner_path"], priority=15) # Wait until file downloads - if result: - file_size = helper.getFilesize(file_path) - return self.actionFile(file_path, header_length=header_length, header_noscript=header_noscript, header_allow_ajax=header_allow_ajax, file_size=file_size, path_parts=path_parts) - else: - self.log.debug("File not found: %s" % path_parts["inner_path"]) - return self.error404(path) - # Serve a media for ui def actionUiMedia(self, path): match = re.match("/uimedia/(?P.*)", path) if match: # Looks like a valid path file_path = "src/Ui/media/%s" % match.group("inner_path") allowed_dir = os.path.abspath("src/Ui/media") # Only files within data/sitehash allowed - if "../" in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir): + if ".." 
in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir): # File not in allowed path return self.error403() else: - if (config.debug or config.merge_media) and match.group("inner_path").startswith("all."): + if config.debug and match.group("inner_path").startswith("all."): # If debugging merge *.css to all.css and *.js to all.js from Debug import DebugMedia DebugMedia.merge(file_path) - return self.actionFile(file_path, header_length=False) # Dont's send site to allow plugins append content - + return self.actionFile(file_path) else: # Bad url return self.error400() - def actionSiteAdd(self): - post_data = self.env["wsgi.input"].read().decode() - post = dict(urllib.parse.parse_qsl(post_data)) - if post["add_nonce"] not in self.server.add_nonces: - return self.error403("Add nonce error.") - self.server.add_nonces.remove(post["add_nonce"]) - SiteManager.site_manager.need(post["address"]) - return self.actionRedirect(post["url"]) - - @helper.encodeResponse - def actionSiteAddPrompt(self, path): - path_parts = self.parsePath(path) - if not path_parts or not self.server.site_manager.isAddress(path_parts["address"]): - return self.error404(path) - - self.sendHeader(200, "text/html", noscript=True) - template = open("src/Ui/template/site_add.html").read() - template = template.replace("{url}", html.escape(self.env["PATH_INFO"])) - template = template.replace("{address}", path_parts["address"]) - template = template.replace("{add_nonce}", self.getAddNonce()) - return template - - def replaceHtmlVariables(self, block, path_parts): - user = self.getCurrentUser() - if user and user.settings: - themeclass = "theme-%-6s" % re.sub("[^a-z]", "", user.settings.get("theme", "light")) - else: - themeclass = "theme-light" - block = block.replace(b"{themeclass}", themeclass.encode("utf8")) - - if path_parts: - site = self.server.sites.get(path_parts.get("address")) - if site.settings["own"]: - modified = int(time.time()) - else: - modified = 
int(site.content_manager.contents["content.json"]["modified"]) - block = block.replace(b"{site_modified}", str(modified).encode("utf8")) - - return block - # Stream a file to client - def actionFile(self, file_path, block_size=64 * 1024, send_header=True, header_length=True, header_noscript=False, header_allow_ajax=False, extra_headers={}, file_size=None, file_obj=None, path_parts=None): - file_name = os.path.basename(file_path) - - if file_size is None: - file_size = helper.getFilesize(file_path) - - if file_size is not None: + def actionFile(self, file_path, block_size=64 * 1024, send_header=True): + if os.path.isfile(file_path): # Try to figure out content type by extension - content_type = self.getContentType(file_name) + content_type = self.getContentType(file_path) + # TODO: Dont allow external access: extra_headers= + # [("Content-Security-Policy", "default-src 'unsafe-inline' data: http://localhost:43110 ws://localhost:43110")] range = self.env.get("HTTP_RANGE") range_start = None - - is_html_file = file_name.endswith(".html") - if is_html_file: - header_length = False - if send_header: - extra_headers = extra_headers.copy() - content_encoding = self.get.get("zeronet_content_encoding", "") - if all(part.strip() in ("gzip", "compress", "deflate", "identity", "br") for part in content_encoding.split(",")): - extra_headers["Content-Encoding"] = content_encoding + extra_headers = {} + file_size = os.path.getsize(file_path) extra_headers["Accept-Ranges"] = "bytes" - if header_length: - extra_headers["Content-Length"] = str(file_size) if range: range_start = int(re.match(".*?([0-9]+)", range).group(1)) if re.match(".*?-([0-9]+)", range): - range_end = int(re.match(".*?-([0-9]+)", range).group(1)) + 1 + range_end = int(re.match(".*?-([0-9]+)", range).group(1))+1 else: range_end = file_size extra_headers["Content-Length"] = str(range_end - range_start) - extra_headers["Content-Range"] = "bytes %s-%s/%s" % (range_start, range_end - 1, file_size) + 
extra_headers["Content-Range"] = "bytes %s-%s/%s" % (range_start, range_end-1, file_size) if range: status = 206 else: status = 200 - self.sendHeader(status, content_type=content_type, noscript=header_noscript, allow_ajax=header_allow_ajax, extra_headers=extra_headers) + self.sendHeader(status, content_type=content_type, extra_headers=extra_headers.items()) if self.env["REQUEST_METHOD"] != "OPTIONS": - if not file_obj: - file_obj = open(file_path, "rb") - + file = open(file_path, "rb") if range_start: - file_obj.seek(range_start) + file.seek(range_start) while 1: try: - block = file_obj.read(block_size) - if is_html_file: - block = self.replaceHtmlVariables(block, path_parts) + block = file.read(block_size) if block: yield block else: raise StopIteration except StopIteration: - file_obj.close() + file.close() break - else: # File not exists - for part in self.error404(str(file_path)): - yield part + else: # File not exits + yield self.error404(file_path) # On websocket connection def actionWebsocket(self): ws = self.env.get("wsgi.websocket") - if ws: - # Allow only same-origin websocket requests - origin = self.env.get("HTTP_ORIGIN") - host = self.env.get("HTTP_HOST") - # Allow only same-origin websocket requests - if origin: - origin_host = origin.split("://", 1)[-1] - if origin_host != host and origin_host not in self.server.allowed_ws_origins: - error_message = "Invalid origin: %s (host: %s, allowed: %s)" % (origin, host, self.server.allowed_ws_origins) - ws.send(json.dumps({"error": error_message})) - return self.error403(error_message) - - # Find site by wrapper_key wrapper_key = self.get["wrapper_key"] + # Find site by wrapper_key site = None - for site_check in list(self.server.sites.values()): + for site_check in self.server.sites.values(): if site_check.settings["wrapper_key"] == wrapper_key: site = site_check if site: # Correct wrapper key - try: - user = self.getCurrentUser() - except Exception as err: - ws.send(json.dumps({"error": "Error in 
data/user.json: %s" % err})) - return self.error500("Error in data/user.json: %s" % err) + user = self.getCurrentUser() if not user: - ws.send(json.dumps({"error": "No user found"})) - return self.error403("No user found") + self.log.error("No user found") + return self.error403() ui_websocket = UiWebsocket(ws, site, self.server, user, self) site.websockets.append(ui_websocket) # Add to site websockets to allow notify on events - self.server.websockets.append(ui_websocket) ui_websocket.start() - self.server.websockets.remove(ui_websocket) - for site_check in list(self.server.sites.values()): + for site_check in self.server.sites.values(): # Remove websocket from every site (admin sites allowed to join other sites event channels) if ui_websocket in site_check.websockets: site_check.websockets.remove(ui_websocket) - return [b"Bye."] + return "Bye." else: # No site found by wrapper key - ws.send(json.dumps({"error": "Wrapper key not found: %s" % wrapper_key})) - return self.error403("Wrapper key not found: %s" % wrapper_key) + self.log.error("Wrapper key not found: %s" % wrapper_key) + return self.error403() else: self.start_response("400 Bad Request", []) - return [b"Not a websocket request!"] + return "Not a websocket!" # Debug last error def actionDebug(self): # Raise last error from DebugHook - import main - last_error = main.DebugHook.last_error + import sys + last_error = sys.modules["main"].DebugHook.last_error if last_error: - raise last_error[0](last_error[1]).with_traceback(last_error[2]) + raise last_error[0], last_error[1], last_error[2] else: self.sendHeader() - return [b"No error! :)"] + return "No error! 
:)" # Just raise an error to get console def actionConsole(self): import sys sites = self.server.sites main = sys.modules["main"] - - def bench(code, times=100, init=None): - sites = self.server.sites - main = sys.modules["main"] - s = time.time() - if init: - eval(compile(init, '', 'exec'), globals(), locals()) - for _ in range(times): - back = eval(code, globals(), locals()) - return ["%s run: %.3fs" % (times, time.time() - s), back] raise Exception("Here is your console") # - Tests - @@ -918,34 +469,30 @@ class UiRequest(object): # Send bad request error def error400(self, message=""): - self.sendHeader(400, noscript=True) - self.log.error("Error 400: %s" % message) + self.sendHeader(400) return self.formatError("Bad Request", message) # You are not allowed to access this def error403(self, message="", details=True): - self.sendHeader(403, noscript=True) - self.log.warning("Error 403: %s" % message) + self.sendHeader(403) return self.formatError("Forbidden", message, details=details) # Send file not found error def error404(self, path=""): - self.sendHeader(404, noscript=True) - return self.formatError("Not Found", path, details=False) + self.sendHeader(404) + return self.formatError("Not Found", path.encode("utf8"), details=False) # Internal server error def error500(self, message=":("): - self.sendHeader(500, noscript=True) - self.log.error("Error 500: %s" % message) - return self.formatError("Server error", message) + self.sendHeader(500) + return self.formatError("Server error", cgi.escape(message)) - @helper.encodeResponse def formatError(self, title, message, details=True): import sys import gevent - if details and config.debug: - details = {key: val for key, val in list(self.env.items()) if hasattr(val, "endswith") and "COOKIE" not in key} + if details: + details = {key: val for key, val in self.env.items() if hasattr(val, "endswith") and "COOKIE" not in key} details["version_zeronet"] = "%s r%s" % (config.version, config.rev) details["version_python"] = 
sys.version details["version_gevent"] = gevent.__version__ @@ -959,16 +506,22 @@ class UiRequest(object):

    %s

    %s

    -

    Please report it if you think this an error.

    +

    Please report it if you think this an error.

    Details:

    %s
    - """ % (title, html.escape(message), html.escape(json.dumps(details, indent=4, sort_keys=True))) + """ % (title, message, json.dumps(details, indent=4, sort_keys=True)) else: return """ -

    %s

    %s

    - """ % (title, html.escape(message)) + """ % (title, message) + + +# - Reload for eaiser developing - +# def reload(): + # import imp, sys + # global UiWebsocket + # UiWebsocket = imp.load_source("UiWebsocket", "src/Ui/UiWebsocket.py").UiWebsocket + # reload(sys.modules["User.UserManager"]) + # UserManager.reloadModule() + # self.user = UserManager.user_manager.getCurrent() diff --git a/src/Ui/UiServer.py b/src/Ui/UiServer.py index 61943ada..9c9592d4 100644 --- a/src/Ui/UiServer.py +++ b/src/Ui/UiServer.py @@ -1,21 +1,21 @@ import logging import time -import urllib +import cgi import socket -import gevent +import sys from gevent.pywsgi import WSGIServer -from lib.gevent_ws import WebSocketHandler +from gevent.pywsgi import WSGIHandler +from lib.geventwebsocket.handler import WebSocketHandler -from .UiRequest import UiRequest +from UiRequest import UiRequest from Site import SiteManager from Config import config from Debug import Debug -import importlib # Skip websocket handler if not necessary -class UiWSGIHandler(WebSocketHandler): +class UiWSGIHandler(WSGIHandler): def __init__(self, *args, **kwargs): self.server = args[2] @@ -23,25 +23,25 @@ class UiWSGIHandler(WebSocketHandler): self.args = args self.kwargs = kwargs - def handleError(self, err): - if config.debug: # Allow websocket errors to appear on /Debug - import main - main.DebugHook.handleError() - else: - ui_request = UiRequest(self.server, {}, self.environ, self.start_response) - block_gen = ui_request.error500("UiWSGIHandler error: %s" % Debug.formatExceptionMessage(err)) - for block in block_gen: - self.write(block) - def run_application(self): - err_name = "UiWSGIHandler websocket" if "HTTP_UPGRADE" in self.environ else "UiWSGIHandler" - try: - super(UiWSGIHandler, self).run_application() - except (ConnectionAbortedError, ConnectionResetError, BrokenPipeError) as err: - logging.warning("%s connection error: %s" % (err_name, err)) - except Exception as err: - logging.warning("%s error: %s" % 
(err_name, Debug.formatException(err))) - self.handleError(err) + if "HTTP_UPGRADE" in self.environ: # Websocket request + try: + ws_handler = WebSocketHandler(*self.args, **self.kwargs) + ws_handler.__dict__ = self.__dict__ # Match class variables + ws_handler.run_application() + except Exception, err: + logging.error("UiWSGIHandler websocket error: %s" % Debug.formatException(err)) + if config.debug: # Allow websocket errors to appear on /Debug + import sys + sys.modules["main"].DebugHook.handleError() + else: # Standard HTTP request + try: + super(UiWSGIHandler, self).run_application() + except Exception, err: + logging.error("UiWSGIHandler error: %s" % Debug.formatException(err)) + if config.debug: # Allow websocket errors to appear on /Debug + import sys + sys.modules["main"].DebugHook.handleError() def handle(self): # Save socket to be able to close them properly on exit @@ -51,53 +51,21 @@ class UiWSGIHandler(WebSocketHandler): class UiServer: + def __init__(self): self.ip = config.ui_ip self.port = config.ui_port - self.running = False if self.ip == "*": - self.ip = "0.0.0.0" # Bind all - if config.ui_host: - self.allowed_hosts = set(config.ui_host) - #TODO: For proxies allow sub domains(www) as valid hosts, should be user preference. - elif config.ui_ip == "127.0.0.1": - # IP Addresses are inherently allowed as they are immune to DNS - # rebinding attacks. - self.allowed_hosts = set(["zero", "localhost:%s" % config.ui_port]) - # "URI producers and normalizers should omit the port component and - # its ':' delimiter if port is empty or if its value would be the - # same as that of the scheme's default." - # Source: https://tools.ietf.org/html/rfc3986#section-3.2.3 - # As a result, we need to support portless hosts if port 80 is in - # use. 
- if config.ui_port == 80: - self.allowed_hosts.update(["localhost"]) - else: - self.allowed_hosts = set([]) - self.allowed_ws_origins = set() - self.allow_trans_proxy = config.ui_trans_proxy - + self.ip = "" # Bind all self.wrapper_nonces = [] - self.add_nonces = [] - self.websockets = [] - self.site_manager = SiteManager.site_manager self.sites = SiteManager.site_manager.list() self.log = logging.getLogger(__name__) - config.error_logger.onNewRecord = self.handleErrorLogRecord - - def handleErrorLogRecord(self, record): - self.updateWebsocket(log_event=record.levelname) - - # After WebUI started - def afterStarted(self): - from util import Platform - Platform.setMaxfilesopened(config.max_files_opened) # Handle WSGI request def handleRequest(self, env, start_response): - path = bytes(env["PATH_INFO"], "raw-unicode-escape").decode("utf8") + path = env["PATH_INFO"] if env.get("QUERY_STRING"): - get = dict(urllib.parse.parse_qsl(env['QUERY_STRING'])) + get = dict(cgi.parse_qsl(env['QUERY_STRING'])) else: get = {} ui_request = UiRequest(self, get, env, start_response) @@ -106,7 +74,7 @@ class UiServer: else: # Catch and display the error try: return ui_request.route(path) - except Exception as err: + except Exception, err: logging.debug("UiRequest error: %s" % Debug.formatException(err)) return ui_request.error500("Err: %s" % Debug.formatException(err)) @@ -115,93 +83,70 @@ class UiServer: global UiRequest import imp import sys - importlib.reload(sys.modules["User.UserManager"]) - importlib.reload(sys.modules["Ui.UiWebsocket"]) + reload(sys.modules["User.UserManager"]) + reload(sys.modules["Ui.UiWebsocket"]) UiRequest = imp.load_source("UiRequest", "src/Ui/UiRequest.py").UiRequest # UiRequest.reload() # Bind and run the server def start(self): - self.running = True handler = self.handleRequest if config.debug: # Auto reload UiRequest on change from Debug import DebugReloader - DebugReloader.watcher.addCallback(self.reload) + DebugReloader(self.reload) # Werkzeug 
Debugger try: from werkzeug.debug import DebuggedApplication handler = DebuggedApplication(self.handleRequest, evalex=True) - except Exception as err: + except Exception, err: self.log.info("%s: For debugging please download Werkzeug (http://werkzeug.pocoo.org/)" % err) from Debug import DebugReloader self.log.write = lambda msg: self.log.debug(msg.strip()) # For Wsgi access.log self.log.info("--------------------------------------") - if ":" in config.ui_ip: - self.log.info("Web interface: http://[%s]:%s/" % (config.ui_ip, config.ui_port)) - else: - self.log.info("Web interface: http://%s:%s/" % (config.ui_ip, config.ui_port)) + self.log.info("Web interface: http://%s:%s/" % (config.ui_ip, config.ui_port)) self.log.info("--------------------------------------") - if config.open_browser and config.open_browser != "False": + if config.open_browser: logging.info("Opening browser: %s...", config.open_browser) import webbrowser - try: - if config.open_browser == "default_browser": - browser = webbrowser.get() - else: - browser = webbrowser.get(config.open_browser) - url = "http://%s:%s/%s" % (config.ui_ip if config.ui_ip != "*" else "127.0.0.1", config.ui_port, config.homepage) - gevent.spawn_later(0.3, browser.open, url, new=2) - except Exception as err: - print("Error starting browser: %s" % err) + if config.open_browser == "default_browser": + browser = webbrowser.get() + else: + browser = webbrowser.get(config.open_browser) + browser.open("http://%s:%s" % (config.ui_ip if config.ui_ip != "*" else "127.0.0.1", config.ui_port), new=2) - self.server = WSGIServer((self.ip, self.port), handler, handler_class=UiWSGIHandler, log=self.log) + self.server = WSGIServer((self.ip.replace("*", ""), self.port), handler, handler_class=UiWSGIHandler, log=self.log) self.server.sockets = {} - self.afterStarted() try: self.server.serve_forever() - except Exception as err: + except Exception, err: self.log.error("Web interface bind error, must be running already, exiting.... 
%s" % err) - import main - main.file_server.stop() + sys.modules["main"].file_server.stop() self.log.debug("Stopped.") def stop(self): self.log.debug("Stopping...") # Close WS sockets if "clients" in dir(self.server): - for client in list(self.server.clients.values()): + for client in self.server.clients.values(): client.ws.close() # Close http sockets sock_closed = 0 - for sock in list(self.server.sockets.values()): + for sock in self.server.sockets.values(): try: - sock.send(b"bye") + sock.send("bye") sock.shutdown(socket.SHUT_RDWR) # sock._sock.close() # sock.close() sock_closed += 1 - except Exception as err: + except Exception, err: self.log.debug("Http connection close error: %s" % err) self.log.debug("Socket closed: %s" % sock_closed) time.sleep(0.1) - if config.debug: - from Debug import DebugReloader - DebugReloader.watcher.stop() self.server.socket.close() self.server.stop() - self.running = False time.sleep(1) - - def updateWebsocket(self, **kwargs): - if kwargs: - param = {"event": list(kwargs.items())[0]} - else: - param = None - - for ws in self.websockets: - ws.event("serverChanged", param) diff --git a/src/Ui/UiWebsocket.py b/src/Ui/UiWebsocket.py index 2f982e1d..04f728fe 100644 --- a/src/Ui/UiWebsocket.py +++ b/src/Ui/UiWebsocket.py @@ -1,42 +1,31 @@ import json import time import sys -import os -import shutil -import re -import copy -import logging -import stat +import hashlib import gevent from Config import config from Site import SiteManager -from Crypt import CryptBitcoin from Debug import Debug from util import QueryJson, RateLimit from Plugin import PluginManager -from Translate import translate as _ -from util import helper -from util import SafeRe -from util.Flag import flag -from Content.ContentManager import VerifyError, SignError @PluginManager.acceptPlugins class UiWebsocket(object): + def __init__(self, ws, site, server, user, request): self.ws = ws self.site = site self.user = user self.log = site.log self.request = request - 
self.permissions = [] self.server = server self.next_message_id = 1 self.waiting_cb = {} # Waiting for callback. Key: message_id, Value: function pointer self.channels = [] # Channels joined to - self.state = {"sending": False} # Shared state of websocket connection + self.sending = False # Currently sending to client self.send_queue = [] # Messages to send to client # Start listener loop @@ -44,111 +33,51 @@ class UiWebsocket(object): ws = self.ws if self.site.address == config.homepage and not self.site.page_requested: # Add open fileserver port message or closed port error to homepage at first request after start - self.site.page_requested = True # Dont add connection notification anymore - import main - file_server = main.file_server - if not file_server.port_opened or file_server.tor_manager.start_onions is None: - self.site.page_requested = False # Not ready yet, check next time - else: - try: - self.addHomepageNotifications() - except Exception as err: - self.log.error("Uncaught Exception: " + Debug.formatException(err)) + if sys.modules["main"].file_server.port_opened is True: + self.site.notifications.append([ + "done", + "Congratulation, your port %s is opened.
    You are full member of ZeroNet network!" % + config.fileserver_port, + 10000 + ]) + elif sys.modules["main"].file_server.port_opened is False: + self.site.notifications.append([ + "error", + """ + Your network connection is restricted. Please, open %s port
    + on your router to become full member of ZeroNet network. + """ % config.fileserver_port, + 0 + ]) + self.site.page_requested = True # Dont add connection notification anymore for notification in self.site.notifications: # Send pending notification messages - # send via WebSocket self.cmd("notification", notification) - # just in case, log them to terminal - if notification[0] == "error": - self.log.error("\n*** %s\n" % self.dedent(notification[1])) - self.site.notifications = [] - while True: try: - if ws.closed: - break - else: - message = ws.receive() - except Exception as err: - self.log.error("WebSocket receive error: %s" % Debug.formatException(err)) - break + message = ws.receive() + except Exception, err: + return "Bye." # Close connection if message: try: - req = json.loads(message) - self.handleRequest(req) - except Exception as err: + self.handleRequest(message) + except Exception, err: if config.debug: # Allow websocket errors to appear on /Debug - import main - main.DebugHook.handleError() - self.log.error("WebSocket handleRequest error: %s \n %s" % (Debug.formatException(err), message)) - if not self.hasPlugin("Multiuser"): - self.cmd("error", "Internal error: %s" % Debug.formatException(err, "html")) - - self.onClosed() - - def onClosed(self): - pass - - def dedent(self, text): - return re.sub("[\\r\\n\\x20\\t]+", " ", text.strip().replace("
    ", " ")) - - def addHomepageNotifications(self): - if not(self.hasPlugin("Multiuser")) and not(self.hasPlugin("UiPassword")): - bind_ip = getattr(config, "ui_ip", "") - whitelist = getattr(config, "ui_restrict", []) - # binds to the Internet, no IP whitelist, no UiPassword, no Multiuser - if ("0.0.0.0" == bind_ip or "*" == bind_ip) and (not whitelist): - self.site.notifications.append([ - "error", - _("You are not going to set up a public gateway. However, your Web UI is
    " + - "open to the whole Internet.
    " + - "Please check your configuration.") - ]) - - def hasPlugin(self, name): - return name in PluginManager.plugin_manager.plugin_names - - # Has permission to run the command - def hasCmdPermission(self, cmd): - flags = flag.db.get(self.getCmdFuncName(cmd), ()) - if "admin" in flags and "ADMIN" not in self.permissions: - return False - else: - return True - - # Has permission to access a site - def hasSitePermission(self, address, cmd=None): - if address != self.site.address and "ADMIN" not in self.site.settings["permissions"]: - return False - else: - return True - - def hasFilePermission(self, inner_path): - valid_signers = self.site.content_manager.getValidSigners(inner_path) - return self.site.settings["own"] or self.user.getAuthAddress(self.site.address) in valid_signers + sys.modules["main"].DebugHook.handleError() + self.log.error("WebSocket handleRequest error: %s" % Debug.formatException(err)) + self.cmd("error", "Internal error: %s" % Debug.formatException(err, "html")) # Event in a channel def event(self, channel, *params): if channel in self.channels: # We are joined to channel if channel == "siteChanged": - site = params[0] - site_info = self.formatSiteInfo(site, create_user=False) + site = params[0] # Triggerer site + site_info = self.formatSiteInfo(site) if len(params) > 1 and params[1]: # Extra data site_info.update(params[1]) self.cmd("setSiteInfo", site_info) - elif channel == "serverChanged": - server_info = self.formatServerInfo() - if len(params) > 0 and params[0]: # Extra data - server_info.update(params[0]) - self.cmd("setServerInfo", server_info) - elif channel == "announcerChanged": - site = params[0] - announcer_info = self.formatAnnouncerInfo(site) - if len(params) > 1 and params[1]: # Extra data - announcer_info.update(params[1]) - self.cmd("setAnnouncerInfo", announcer_info) # Send response to client (to = message.id) def response(self, to, result): @@ -162,20 +91,19 @@ class UiWebsocket(object): def send(self, message, cb=None): 
message["id"] = self.next_message_id # Add message id to allow response self.next_message_id += 1 - if cb: # Callback after client responded + if cb: # Callback after client responsed self.waiting_cb[message["id"]] = cb - self.send_queue.append(message) - if self.state["sending"]: + if self.sending: return # Already sending + self.send_queue.append(message) try: while self.send_queue: - self.state["sending"] = True + self.sending = True message = self.send_queue.pop(0) self.ws.send(json.dumps(message)) - self.state["sending"] = False - except Exception as err: + self.sending = False + except Exception, err: self.log.debug("Websocket send error: %s" % Debug.formatException(err)) - self.state["sending"] = False def getPermissions(self, req_id): permissions = self.site.settings["permissions"] @@ -184,69 +112,43 @@ class UiWebsocket(object): permissions.append("ADMIN") return permissions - def asyncWrapper(self, func): - def asyncErrorWatcher(func, *args, **kwargs): - try: - result = func(*args, **kwargs) - if result is not None: - self.response(args[0], result) - except Exception as err: - if config.debug: # Allow websocket errors to appear on /Debug - import main - main.DebugHook.handleError() - self.log.error("WebSocket handleRequest error: %s" % Debug.formatException(err)) - self.cmd("error", "Internal error: %s" % Debug.formatException(err, "html")) - - def wrapper(*args, **kwargs): - gevent.spawn(asyncErrorWatcher, func, *args, **kwargs) - return wrapper - - def getCmdFuncName(self, cmd): - func_name = "action" + cmd[0].upper() + cmd[1:] - return func_name - # Handle incoming messages - def handleRequest(self, req): + def handleRequest(self, data): + req = json.loads(data) cmd = req.get("cmd") params = req.get("params") - self.permissions = self.getPermissions(req["id"]) + permissions = self.getPermissions(req["id"]) + + admin_commands = ( + "sitePause", "siteResume", "siteDelete", "siteList", "siteSetLimit", "siteClone", + "channelJoinAllsite", "serverUpdate", 
"serverPortcheck", "certSet" + ) if cmd == "response": # It's a response to a command return self.actionResponse(req["to"], req["result"]) + elif cmd in admin_commands and "ADMIN" not in permissions: # Admin commands + return self.response(req["id"], {"error:", "You don't have permission to run %s" % cmd}) else: # Normal command - func_name = self.getCmdFuncName(cmd) + func_name = "action" + cmd[0].upper() + cmd[1:] func = getattr(self, func_name, None) - if self.site.settings.get("deleting"): - return self.response(req["id"], {"error": "Site is deleting"}) - if not func: # Unknown command - return self.response(req["id"], {"error": "Unknown command: %s" % cmd}) + self.response(req["id"], {"error": "Unknown command: %s" % cmd}) + return - if not self.hasCmdPermission(cmd): # Admin commands - return self.response(req["id"], {"error": "You don't have permission to run %s" % cmd}) - - # Execute in parallel - func_flags = flag.db.get(self.getCmdFuncName(cmd), ()) - if func_flags and "async_run" in func_flags: - func = self.asyncWrapper(func) - - # Support calling as named, unnamed parameters and raw first argument too + # Support calling as named, unnamed paramters and raw first argument too if type(params) is dict: - result = func(req["id"], **params) + func(req["id"], **params) elif type(params) is list: - result = func(req["id"], *params) + func(req["id"], *params) elif params: - result = func(req["id"], params) + func(req["id"], params) else: - result = func(req["id"]) - - if result is not None: - self.response(req["id"], result) + func(req["id"]) # Format site info def formatSiteInfo(self, site, create_user=True): - content = site.content_manager.contents.get("content.json", {}) + content = site.content_manager.contents.get("content.json") if content: # Remove unnecessary data transfer content = content.copy() content["files"] = len(content.get("files", {})) @@ -261,13 +163,14 @@ class UiWebsocket(object): settings = site.settings.copy() del 
settings["wrapper_key"] # Dont expose wrapper key + del settings["auth_key"] # Dont send auth key twice ret = { + "auth_key": self.site.settings["auth_key"], # Obsolete, will be removed + "auth_key_sha512": hashlib.sha512(self.site.settings["auth_key"]).hexdigest()[0:64], # Obsolete, will be removed "auth_address": self.user.getAuthAddress(site.address, create=create_user), "cert_user_id": self.user.getCertUserId(site.address), "address": site.address, - "address_short": site.address_short, - "address_hash": site.address_hash.hex(), "settings": settings, "content_updated": site.content_updated, "bad_files": len(site.bad_files), @@ -281,63 +184,26 @@ class UiWebsocket(object): } if site.settings["own"]: ret["privatekey"] = bool(self.user.getSiteData(site.address, create=create_user).get("privatekey")) - if site.isServing() and content: + if site.settings["serving"] and content: ret["peers"] += 1 # Add myself if serving return ret def formatServerInfo(self): - import main - file_server = main.file_server - if file_server.port_opened == {}: - ip_external = None - else: - ip_external = any(file_server.port_opened.values()) - back = { - "ip_external": ip_external, - "port_opened": file_server.port_opened, + return { + "ip_external": bool(sys.modules["main"].file_server.port_opened), "platform": sys.platform, "fileserver_ip": config.fileserver_ip, "fileserver_port": config.fileserver_port, - "tor_enabled": file_server.tor_manager.enabled, - "tor_status": file_server.tor_manager.status, - "tor_has_meek_bridges": file_server.tor_manager.has_meek_bridges, - "tor_use_bridges": config.tor_use_bridges, "ui_ip": config.ui_ip, "ui_port": config.ui_port, "version": config.version, "rev": config.rev, - "timecorrection": file_server.timecorrection, - "language": config.language, "debug": config.debug, - "offline": config.offline, - "plugins": PluginManager.plugin_manager.plugin_names, - "plugins_rev": PluginManager.plugin_manager.plugins_rev, - "user_settings": self.user.settings + 
"plugins": PluginManager.plugin_manager.plugin_names } - if "ADMIN" in self.site.settings["permissions"]: - back["updatesite"] = config.updatesite - back["dist_type"] = config.dist_type - back["lib_verify_best"] = CryptBitcoin.lib_verify_best - return back - - def formatAnnouncerInfo(self, site): - return {"address": site.address, "stats": site.announcer.stats} # - Actions - - def actionAs(self, to, address, cmd, params=[]): - if not self.hasSitePermission(address, cmd=cmd): - #TODO! Return this as error ? - return self.response(to, "No permission for site %s" % address) - if not self.server.sites.get(address): - return self.response(to, {"error": "Site Does Not Exist: %s" % address}) - req_self = copy.copy(self) - req_self.site = self.server.sites.get(address) - req_self.hasCmdPermission = self.hasCmdPermission # Use the same permissions as current site - req_obj = super(UiWebsocket, req_self) - req = {"id": to, "cmd": cmd, "params": params} - req_obj.handleRequest(req) - # Do callback on response {"cmd": "response", "to": message_id, "result": result} def actionResponse(self, to, result): if to in self.waiting_cb: @@ -353,133 +219,61 @@ class UiWebsocket(object): def actionSiteInfo(self, to, file_status=None): ret = self.formatSiteInfo(self.site) if file_status: # Client queries file status - if self.site.storage.isFile(file_status): # File exist, add event done + if self.site.storage.isFile(file_status): # File exits, add event done ret["event"] = ("file_done", file_status) self.response(to, ret) - def actionSiteBadFiles(self, to): - return list(self.site.bad_files.keys()) - # Join to an event channel - def actionChannelJoin(self, to, channels): - if type(channels) != list: - channels = [channels] - - for channel in channels: - if channel not in self.channels: - self.channels.append(channel) - - self.response(to, "ok") + def actionChannelJoin(self, to, channel): + if channel not in self.channels: + self.channels.append(channel) # Server variables def 
actionServerInfo(self, to): - back = self.formatServerInfo() - self.response(to, back) - - # Create a new wrapper nonce that allows to load html file - @flag.admin - def actionServerGetWrapperNonce(self, to): - wrapper_nonce = self.request.getWrapperNonce() - self.response(to, wrapper_nonce) - - def actionAnnouncerInfo(self, to): - back = self.formatAnnouncerInfo(self.site) - self.response(to, back) - - @flag.admin - def actionAnnouncerStats(self, to): - back = {} - trackers = self.site.announcer.getTrackers() - for site in list(self.server.sites.values()): - for tracker, stats in site.announcer.stats.items(): - if tracker not in trackers: - continue - if tracker not in back: - back[tracker] = {} - is_latest_data = bool(stats["time_request"] > back[tracker].get("time_request", 0) and stats["status"]) - for key, val in stats.items(): - if key.startswith("num_"): - back[tracker][key] = back[tracker].get(key, 0) + val - elif is_latest_data: - back[tracker][key] = val - - return back + ret = self.formatServerInfo() + self.response(to, ret) # Sign content.json - def actionSiteSign(self, to, privatekey=None, inner_path="content.json", remove_missing_optional=False, update_changed_files=False, response_ok=True): - self.log.debug("Signing: %s" % inner_path) + def actionSiteSign(self, to, privatekey=None, inner_path="content.json", response_ok=True): site = self.site extend = {} # Extended info for signing - - # Change to the file's content.json - file_info = site.content_manager.getFileInfo(inner_path) - if not inner_path.endswith("content.json"): - if not file_info: - raise Exception("Invalid content.json file: %s" % inner_path) + if not inner_path.endswith("content.json"): # Find the content.json first + file_info = site.content_manager.getFileInfo(inner_path) inner_path = file_info["content_inner_path"] - - # Add certificate to user files - is_user_content = file_info and ("cert_signers" in file_info or "cert_signers_pattern" in file_info) - if is_user_content and 
privatekey is None: - cert = self.user.getCert(self.site.address) - if not cert: - error = "Site sign failed: No certificate selected for Site: %s, Hence Signing inner_path: %s Failed, Try Adding/Selecting User Cert via Site Login" % (self.site.address, inner_path) - self.log.error(error) - return self.response(to, {"error": error}) - else: + if "cert_signers" in file_info: # Its an user dir file + cert = self.user.getCert(self.site.address) extend["cert_auth_type"] = cert["auth_type"] extend["cert_user_id"] = self.user.getCertUserId(site.address) extend["cert_sign"] = cert["cert_sign"] - self.log.debug("Extending content.json with cert %s" % extend["cert_user_id"]) - if not self.hasFilePermission(inner_path): - self.log.error("SiteSign error: you don't own this site & site owner doesn't allow you to do so.") + if ( + not site.settings["own"] and + self.user.getAuthAddress(self.site.address) not in self.site.content_manager.getValidSigners(inner_path) + ): return self.response(to, {"error": "Forbidden, you can only modify your own sites"}) - - if privatekey == "stored": # Get privatekey from sites.json + if privatekey == "stored": privatekey = self.user.getSiteData(self.site.address).get("privatekey") - if not privatekey: - self.cmd("notification", ["error", _["Content signing failed"] + "
    Private key not found in sites.json "]) - self.response(to, {"error": "Site sign failed: Private key not stored."}) - self.log.error("Site sign failed: %s: Private key not stored in sites.json" % inner_path) - return if not privatekey: # Get privatekey from users.json auth_address privatekey = self.user.getAuthPrivatekey(self.site.address) # Signing - # Reload content.json, ignore errors to make it up-to-date - site.content_manager.loadContent(inner_path, add_bad_files=False, force=True) - # Sign using private key sent by user - try: - site.content_manager.sign(inner_path, privatekey, extend=extend, update_changed_files=update_changed_files, remove_missing_optional=remove_missing_optional) - except (VerifyError, SignError) as err: - self.cmd("notification", ["error", _["Content signing failed"] + "
    %s" % err]) - self.response(to, {"error": "Site sign failed: %s" % err}) - self.log.error("Site sign failed: %s: %s" % (inner_path, Debug.formatException(err))) - return - except Exception as err: - self.cmd("notification", ["error", _["Content signing error"] + "
    %s" % Debug.formatException(err)]) - self.response(to, {"error": "Site sign error: %s" % Debug.formatException(err)}) - self.log.error("Site sign error: %s: %s" % (inner_path, Debug.formatException(err))) + site.content_manager.loadContent(add_bad_files=False, force=True) # Reload content.json, ignore errors to make it up-to-date + signed = site.content_manager.sign(inner_path, privatekey, extend=extend) # Sign using private key sent by user + if not signed: + self.cmd("notification", ["error", "Content sign failed: invalid private key."]) + self.response(to, {"error": "Site sign failed"}) return - site.content_manager.loadContent(inner_path, add_bad_files=False) # Load new content.json, ignore errors - - if update_changed_files: - self.site.updateWebsocket(file_done=inner_path) - + site.content_manager.loadContent(add_bad_files=False) # Load new content.json, ignore errors if response_ok: self.response(to, "ok") - else: - return inner_path + + return inner_path # Sign and publish content.json - def actionSitePublish(self, to, privatekey=None, inner_path="content.json", sign=True, remove_missing_optional=False, update_changed_files=False): + def actionSitePublish(self, to, privatekey=None, inner_path="content.json", sign=True): if sign: - inner_path = self.actionSiteSign( - to, privatekey, inner_path, response_ok=False, - remove_missing_optional=remove_missing_optional, update_changed_files=update_changed_files - ) + inner_path = self.actionSiteSign(to, privatekey, inner_path, response_ok=False) if not inner_path: return # Publishing @@ -488,120 +282,55 @@ class UiWebsocket(object): self.site.saveSettings() self.site.announce() - if inner_path not in self.site.content_manager.contents: - return self.response(to, {"error": "File %s not found" % inner_path}) - event_name = "publish %s %s" % (self.site.address, inner_path) - called_instantly = RateLimit.isAllowed(event_name, 30) - thread = RateLimit.callAsync(event_name, 30, self.doSitePublish, self.site, 
inner_path) # Only publish once in 30 seconds + thread = RateLimit.callAsync(event_name, 7, self.site.publish, 5, inner_path) # Only publish once in 7 second to 5 peers notification = "linked" not in dir(thread) # Only display notification on first callback thread.linked = True - if called_instantly: # Allowed to call instantly - # At the end callback with request id and thread - self.cmd("progress", ["publish", _["Content published to {0}/{1} peers."].format(0, 5), 0]) - thread.link(lambda thread: self.cbSitePublish(to, self.site, thread, notification, callback=notification)) - else: - self.cmd( - "notification", - ["info", _["Content publish queued for {0:.0f} seconds."].format(RateLimit.delayLeft(event_name, 30)), 5000] - ) - self.response(to, "ok") - # At the end display notification - thread.link(lambda thread: self.cbSitePublish(to, self.site, thread, notification, callback=False)) - - def doSitePublish(self, site, inner_path): - def cbProgress(published, limit): - progress = int(float(published) / limit * 100) - self.cmd("progress", [ - "publish", - _["Content published to {0}/{1} peers."].format(published, limit), - progress - ]) - diffs = site.content_manager.getDiffs(inner_path) - back = site.publish(limit=10, inner_path=inner_path, diffs=diffs, cb_progress=cbProgress) - if back == 0: # Failed to publish to anyone - self.cmd("progress", ["publish", _["Content publish failed."], -100]) - else: - cbProgress(back, back) - return back + thread.link(lambda thread: self.cbSitePublish(to, thread, notification)) # At the end callback with request id and thread # Callback of site publish - def cbSitePublish(self, to, site, thread, notification=True, callback=True): + def cbSitePublish(self, to, thread, notification=True): + site = self.site published = thread.value - if published > 0: # Successfully published + if published > 0: # Successfuly published if notification: - # self.cmd("notification", ["done", _["Content published to {0} peers."].format(published), 
5000]) - site.updateWebsocket() # Send updated site data to local websocket clients - if callback: + self.cmd("notification", ["done", "Content published to %s peers." % published, 5000]) self.response(to, "ok") + site.updateWebsocket() # Send updated site data to local websocket clients else: if len(site.peers) == 0: - import main - if any(main.file_server.port_opened.values()) or main.file_server.tor_manager.start_onions: + if sys.modules["main"].file_server.port_opened: if notification: - self.cmd("notification", ["info", _["No peers found, but your content is ready to access."]]) - if callback: + self.cmd("notification", ["info", "No peers found, but your content is ready to access.", 5000]) self.response(to, "ok") else: if notification: self.cmd("notification", [ "info", - _("""{_[Your network connection is restricted. Please, open {0} port]}
    - {_[on your router to make your site accessible for everyone.]}""").format(config.fileserver_port) + """Your network connection is restricted. Please, open %s port
    + on your router to make your site accessible for everyone.""" % config.fileserver_port ]) - if callback: self.response(to, {"error": "Port not opened."}) else: if notification: + self.cmd("notification", ["error", "Content publish failed."]) self.response(to, {"error": "Content publish failed."}) - def actionSiteReload(self, to, inner_path): - self.site.content_manager.loadContent(inner_path, add_bad_files=False) - self.site.storage.verifyFiles(quick_check=True) - self.site.updateWebsocket() - return "ok" - # Write a file to disk - def actionFileWrite(self, to, inner_path, content_base64, ignore_bad_files=False): - valid_signers = self.site.content_manager.getValidSigners(inner_path) - auth_address = self.user.getAuthAddress(self.site.address) - if not self.hasFilePermission(inner_path): - self.log.error("FileWrite forbidden %s not in valid_signers %s" % (auth_address, valid_signers)) + def actionFileWrite(self, to, inner_path, content_base64): + if ( + not self.site.settings["own"] and + self.user.getAuthAddress(self.site.address) not in self.site.content_manager.getValidSigners(inner_path) + ): return self.response(to, {"error": "Forbidden, you can only modify your own files"}) - # Try not to overwrite files currently in sync - content_inner_path = re.sub("^(.*)/.*?$", "\\1/content.json", inner_path) # Also check the content.json from same directory - if (self.site.bad_files.get(inner_path) or self.site.bad_files.get(content_inner_path)) and not ignore_bad_files: - found = self.site.needFile(inner_path, update=True, priority=10) - if not found: - self.cmd( - "confirm", - [_["This file still in sync, if you write it now, then the previous content may be lost."], _["Write content anyway"]], - lambda res: self.actionFileWrite(to, inner_path, content_base64, ignore_bad_files=True) - ) - return False - try: import base64 content = base64.b64decode(content_base64) - # Save old file to generate patch later - if ( - inner_path.endswith(".json") and not 
inner_path.endswith("content.json") and - self.site.storage.isFile(inner_path) and not self.site.storage.isFile(inner_path + "-old") - ): - try: - self.site.storage.rename(inner_path, inner_path + "-old") - except Exception: - # Rename failed, fall back to standard file write - f_old = self.site.storage.open(inner_path, "rb") - f_new = self.site.storage.open(inner_path + "-old", "wb") - shutil.copyfileobj(f_old, f_new) - self.site.storage.write(inner_path, content) - except Exception as err: - self.log.error("File write error: %s" % Debug.formatException(err)) - return self.response(to, {"error": "Write error: %s" % Debug.formatException(err)}) + except Exception, err: + return self.response(to, {"error": "Write error: %s" % err}) if inner_path.endswith("content.json"): self.site.content_manager.loadContent(inner_path, add_bad_files=False, force=True) @@ -614,29 +343,16 @@ class UiWebsocket(object): ws.event("siteChanged", self.site, {"event": ["file_done", inner_path]}) def actionFileDelete(self, to, inner_path): - if not self.hasFilePermission(inner_path): - self.log.error("File delete error: you don't own this site & you are not approved by the owner.") + if ( + not self.site.settings["own"] and + self.user.getAuthAddress(self.site.address) not in self.site.content_manager.getValidSigners(inner_path) + ): return self.response(to, {"error": "Forbidden, you can only modify your own files"}) - need_delete = True - file_info = self.site.content_manager.getFileInfo(inner_path) - if file_info and file_info.get("optional"): - # Non-existing optional files won't be removed from content.json, so we have to do it manually - self.log.debug("Deleting optional file: %s" % inner_path) - relative_path = file_info["relative_path"] - content_json = self.site.storage.loadJson(file_info["content_inner_path"]) - if relative_path in content_json.get("files_optional", {}): - del content_json["files_optional"][relative_path] - 
self.site.storage.writeJson(file_info["content_inner_path"], content_json) - self.site.content_manager.loadContent(file_info["content_inner_path"], add_bad_files=False, force=True) - need_delete = self.site.storage.isFile(inner_path) # File sill exists after removing from content.json (owned site) - - if need_delete: - try: - self.site.storage.delete(inner_path) - except Exception as err: - self.log.error("File delete error: %s" % err) - return self.response(to, {"error": "Delete error: %s" % Debug.formatExceptionMessage(err)}) + try: + self.site.storage.delete(inner_path) + except Exception, err: + return self.response(to, {"error": "Delete error: %s" % err}) self.response(to, "ok") @@ -646,108 +362,42 @@ class UiWebsocket(object): ws.event("siteChanged", self.site, {"event": ["file_deleted", inner_path]}) # Find data in json files - def actionFileQuery(self, to, dir_inner_path, query=None): + def actionFileQuery(self, to, dir_inner_path, query): # s = time.time() dir_path = self.site.storage.getPath(dir_inner_path) - rows = list(QueryJson.query(dir_path, query or "")) + rows = list(QueryJson.query(dir_path, query)) # self.log.debug("FileQuery %s %s done in %s" % (dir_inner_path, query, time.time()-s)) return self.response(to, rows) - # List files in directory - @flag.async_run - def actionFileList(self, to, inner_path): - try: - return list(self.site.storage.walk(inner_path)) - except Exception as err: - self.log.error("fileList %s error: %s" % (inner_path, Debug.formatException(err))) - return {"error": Debug.formatExceptionMessage(err)} - - # List directories in a directory - @flag.async_run - def actionDirList(self, to, inner_path, stats=False): - try: - if stats: - back = [] - for file_name in self.site.storage.list(inner_path): - file_stats = os.stat(self.site.storage.getPath(inner_path + "/" + file_name)) - is_dir = stat.S_ISDIR(file_stats.st_mode) - back.append( - {"name": file_name, "size": file_stats.st_size, "is_dir": is_dir} - ) - return back - else: - 
return list(self.site.storage.list(inner_path)) - except Exception as err: - self.log.error("dirList %s error: %s" % (inner_path, Debug.formatException(err))) - return {"error": Debug.formatExceptionMessage(err)} - # Sql query def actionDbQuery(self, to, query, params=None, wait_for=None): - if config.debug or config.verbose: - s = time.time() rows = [] try: res = self.site.storage.query(query, params) - except Exception as err: # Response the error to client - self.log.error("DbQuery error: %s" % Debug.formatException(err)) - return self.response(to, {"error": Debug.formatExceptionMessage(err)}) + except Exception, err: # Response the error to client + return self.response(to, {"error": str(err)}) # Convert result to dict for row in res: rows.append(dict(row)) - if config.verbose and time.time() - s > 0.1: # Log slow query - self.log.debug("Slow query: %s (%.3fs)" % (query, time.time() - s)) return self.response(to, rows) # Return file content - @flag.async_run - def actionFileGet(self, to, inner_path, required=True, format="text", timeout=300, priority=6): + def actionFileGet(self, to, inner_path, required=True): try: if required or inner_path in self.site.bad_files: - with gevent.Timeout(timeout): - self.site.needFile(inner_path, priority=priority) - body = self.site.storage.read(inner_path, "rb") - except (Exception, gevent.Timeout) as err: - self.log.debug("%s fileGet error: %s" % (inner_path, Debug.formatException(err))) + self.site.needFile(inner_path, priority=6) + body = self.site.storage.read(inner_path) + except Exception, err: + self.log.debug("%s fileGet error: %s" % (inner_path, err)) body = None + return self.response(to, body) - if not body: - body = None - elif format == "base64": - import base64 - body = base64.b64encode(body).decode() - else: - try: - body = body.decode() - except Exception as err: - self.response(to, {"error": "Error decoding text: %s" % err}) - self.response(to, body) - - @flag.async_run - def actionFileNeed(self, to, 
inner_path, timeout=300, priority=6): - try: - with gevent.Timeout(timeout): - self.site.needFile(inner_path, priority=priority) - except (Exception, gevent.Timeout) as err: - return self.response(to, {"error": Debug.formatExceptionMessage(err)}) - return self.response(to, "ok") - - def actionFileRules(self, to, inner_path, use_my_cert=False, content=None): - if not content: # No content defined by function call + def actionFileRules(self, to, inner_path): + rules = self.site.content_manager.getRules(inner_path) + if inner_path.endswith("content.json"): content = self.site.content_manager.contents.get(inner_path) - - if not content: # File not created yet - cert = self.user.getCert(self.site.address) - if cert and cert["auth_address"] in self.site.content_manager.getValidSigners(inner_path): - # Current selected cert if valid for this site, add it to query rules - content = {} - content["cert_auth_type"] = cert["auth_type"] - content["cert_user_id"] = self.user.getCertUserId(self.site.address) - content["cert_sign"] = cert["cert_sign"] - - rules = self.site.content_manager.getRules(inner_path, content) - if inner_path.endswith("content.json") and rules: if content: - rules["current_size"] = len(json.dumps(content)) + sum([file["size"] for file in list(content.get("files", {}).values())]) + rules["current_size"] = len(json.dumps(content)) + sum([file["size"] for file in content["files"].values()]) else: rules["current_size"] = 0 return self.response(to, rules) @@ -759,184 +409,102 @@ class UiWebsocket(object): if res is True: self.cmd( "notification", - ["done", _("{_[New certificate added]:} {auth_type}/{auth_user_name}@{domain}.")] + ["done", "New certificate added: %s/%s@%s." 
% (auth_type, auth_user_name, domain)] ) - self.user.setCert(self.site.address, domain) - self.site.updateWebsocket(cert_changed=domain) self.response(to, "ok") - elif res is False: - # Display confirmation of change - cert_current = self.user.certs[domain] - body = _("{_[Your current certificate]:} {cert_current[auth_type]}/{cert_current[auth_user_name]}@{domain}") - self.cmd( - "confirm", - [body, _("Change it to {auth_type}/{auth_user_name}@{domain}")], - lambda res: self.cbCertAddConfirm(to, domain, auth_type, auth_user_name, cert) - ) else: self.response(to, "Not changed") - except Exception as err: - self.log.error("CertAdd error: Exception - %s (%s)" % (err.message, Debug.formatException(err))) + except Exception, err: self.response(to, {"error": err.message}) - def cbCertAddConfirm(self, to, domain, auth_type, auth_user_name, cert): - self.user.deleteCert(domain) - self.user.addCert(self.user.getAuthAddress(self.site.address), domain, auth_type, auth_user_name, cert) - self.cmd( - "notification", - ["done", _("Certificate changed to: {auth_type}/{auth_user_name}@{domain}.")] - ) - self.user.setCert(self.site.address, domain) - self.site.updateWebsocket(cert_changed=domain) - self.response(to, "ok") - # Select certificate for site - def actionCertSelect(self, to, accepted_domains=[], accept_any=False, accepted_pattern=None): + def actionCertSelect(self, to, accepted_domains=[]): accounts = [] - accounts.append(["", _["No certificate"], ""]) # Default option + accounts.append(["", "Unique to site", ""]) # Default option active = "" # Make it active if no other option found # Add my certs auth_address = self.user.getAuthAddress(self.site.address) # Current auth address - site_data = self.user.getSiteData(self.site.address) # Current auth address - - if not accepted_domains and not accepted_pattern: # Accept any if no filter defined - accept_any = True - - for domain, cert in list(self.user.certs.items()): - if auth_address == cert["auth_address"] and domain == 
site_data.get("cert"): + for domain, cert in self.user.certs.items(): + if auth_address == cert["auth_address"]: active = domain title = cert["auth_user_name"] + "@" + domain - accepted_pattern_match = accepted_pattern and SafeRe.match(accepted_pattern, domain) - if domain in accepted_domains or accept_any or accepted_pattern_match: + if domain in accepted_domains or not accepted_domains: accounts.append([domain, title, ""]) else: accounts.append([domain, title, "disabled"]) # Render the html - body = "" + _["Select account you want to use in this site:"] + "" + body = "Select account you want to use in this site:" # Accounts for domain, account, css_class in accounts: if domain == active: css_class += " active" # Currently selected option - title = _("%s ({_[currently selected]})") % account + title = "%s (currently selected)" % account else: title = "%s" % account body += "%s" % (css_class, domain, title) - # More available providers - more_domains = [domain for domain in accepted_domains if domain not in self.user.certs] # Domains we not displayed yet + # More avalible providers + more_domains = [domain for domain in accepted_domains if domain not in self.user.certs] # Domainains we not displayed yet if more_domains: # body+= "Accepted authorization providers by the site:" body += "
    " for domain in more_domains: - body += _(""" - - {_[Register]} »{domain} + body += """ + + Register »%s - """) + """ % (domain, domain) body += "
    " - script = """ + body += """ + + """ - self.cmd("notification", ["ask", body], lambda domain: self.actionCertSet(to, domain)) - self.cmd("injectScript", script) - - # - Admin actions - - - @flag.admin - def actionPermissionAdd(self, to, permission): - if permission not in self.site.settings["permissions"]: - self.site.settings["permissions"].append(permission) - self.site.saveSettings() - self.site.updateWebsocket(permission_added=permission) - self.response(to, "ok") - - @flag.admin - def actionPermissionRemove(self, to, permission): - self.site.settings["permissions"].remove(permission) - self.site.saveSettings() - self.site.updateWebsocket(permission_removed=permission) - self.response(to, "ok") - - @flag.admin - def actionPermissionDetails(self, to, permission): - if permission == "ADMIN": - self.response(to, _["Modify your client's configuration and access all site"] + " " + _["(Dangerous!)"] + "") - elif permission == "NOSANDBOX": - self.response(to, _["Modify your client's configuration and access all site"] + " " + _["(Dangerous!)"] + "") - elif permission == "PushNotification": - self.response(to, _["Send notifications"]) - else: - self.response(to, "") + # Send the notification + self.cmd("notification", ["ask", body]) # Set certificate that used for authenticate user for site - @flag.admin def actionCertSet(self, to, domain): self.user.setCert(self.site.address, domain) self.site.updateWebsocket(cert_changed=domain) - self.response(to, "ok") - # List user's certificates - @flag.admin - def actionCertList(self, to): - back = [] - auth_address = self.user.getAuthAddress(self.site.address) - for domain, cert in list(self.user.certs.items()): - back.append({ - "auth_address": cert["auth_address"], - "auth_type": cert["auth_type"], - "auth_user_name": cert["auth_user_name"], - "domain": domain, - "selected": cert["auth_address"] == auth_address - }) - return back + # - Admin actions - # List all site info - @flag.admin - def actionSiteList(self, to, 
connecting_sites=False): + def actionSiteList(self, to): ret = [] - for site in list(self.server.sites.values()): - if not site.content_manager.contents.get("content.json") and not connecting_sites: - continue # Incomplete site + SiteManager.site_manager.load() # Reload sites + for site in self.server.sites.values(): + if not site.content_manager.contents.get("content.json"): + continue # Broken site ret.append(self.formatSiteInfo(site, create_user=False)) # Dont generate the auth_address on listing self.response(to, ret) # Join to an event channel on all sites - @flag.admin def actionChannelJoinAllsite(self, to, channel): if channel not in self.channels: # Add channel to channels self.channels.append(channel) - for site in list(self.server.sites.values()): # Add websocket to every channel + for site in self.server.sites.values(): # Add websocket to every channel if self not in site.websockets: site.websockets.append(self) - self.response(to, "ok") - # Update site content.json - def actionSiteUpdate(self, to, address, check_files=False, since=None, announce=False): - def updateThread(): - site.update(announce=announce, check_files=check_files, since=since) - self.response(to, "Updated") - + def actionSiteUpdate(self, to, address): site = self.server.sites.get(address) if site and (site.address == self.site.address or "ADMIN" in self.site.settings["permissions"]): - if not site.settings["serving"]: - site.settings["serving"] = True - site.saveSettings() - - gevent.spawn(updateThread) + gevent.spawn(site.update) else: self.response(to, {"error": "Unknown site: %s" % address}) # Pause site serving - @flag.admin def actionSitePause(self, to, address): site = self.server.sites.get(address) if site: @@ -944,12 +512,10 @@ class UiWebsocket(object): site.saveSettings() site.updateWebsocket() site.worker_manager.stopWorkers() - self.response(to, "Paused") else: self.response(to, {"error": "Unknown site: %s" % address}) # Resume site serving - @flag.admin def 
actionSiteResume(self, to, address): site = self.server.sites.get(address) if site: @@ -958,321 +524,47 @@ class UiWebsocket(object): gevent.spawn(site.update, announce=True) time.sleep(0.001) # Wait for update thread starting site.updateWebsocket() - self.response(to, "Resumed") else: self.response(to, {"error": "Unknown site: %s" % address}) - @flag.admin - @flag.no_multiuser def actionSiteDelete(self, to, address): site = self.server.sites.get(address) if site: - site.delete() + site.settings["serving"] = False + site.saveSettings() + site.worker_manager.running = False + site.worker_manager.stopWorkers() + site.storage.deleteFiles() + site.updateWebsocket() + SiteManager.site_manager.delete(address) self.user.deleteSiteData(address) - self.response(to, "Deleted") - import gc - gc.collect(2) else: self.response(to, {"error": "Unknown site: %s" % address}) - def cbSiteClone(self, to, address, root_inner_path="", target_address=None, redirect=True): - self.cmd("notification", ["info", _["Cloning site..."]]) + def actionSiteClone(self, to, address): + self.cmd("notification", ["info", "Cloning site..."]) site = self.server.sites.get(address) - response = {} - if target_address: - target_site = self.server.sites.get(target_address) - privatekey = self.user.getSiteData(target_site.address).get("privatekey") - site.clone(target_address, privatekey, root_inner_path=root_inner_path) - self.cmd("notification", ["done", _["Site source code upgraded!"]]) - site.publish() - response = {"address": target_address} - else: - # Generate a new site from user's bip32 seed - new_address, new_address_index, new_site_data = self.user.getNewSiteData() - new_site = site.clone(new_address, new_site_data["privatekey"], address_index=new_address_index, root_inner_path=root_inner_path) - new_site.settings["own"] = True - new_site.saveSettings() - self.cmd("notification", ["done", _["Site cloned"]]) - if redirect: - self.cmd("redirect", "/%s" % new_address) - 
gevent.spawn(new_site.announce) - response = {"address": new_address} - self.response(to, response) - return "ok" + # Generate a new site from user's bip32 seed + new_address, new_address_index, new_site_data = self.user.getNewSiteData() + new_site = site.clone(new_address, new_site_data["privatekey"], address_index=new_address_index) + new_site.settings["own"] = True + new_site.saveSettings() + self.cmd("notification", ["done", "Site cloned" % new_address]) + gevent.spawn(new_site.announce) - @flag.no_multiuser - def actionSiteClone(self, to, address, root_inner_path="", target_address=None, redirect=True): - if not SiteManager.site_manager.isAddress(address): - self.response(to, {"error": "Not a site: %s" % address}) - return - - if not self.server.sites.get(address): - # Don't expose site existence - return - - site = self.server.sites.get(address) - if site.bad_files: - for bad_inner_path in list(site.bad_files.keys()): - is_user_file = "cert_signers" in site.content_manager.getRules(bad_inner_path) - if not is_user_file and bad_inner_path != "content.json": - self.cmd("notification", ["error", _["Clone error: Site still in sync"]]) - return {"error": "Site still in sync"} - - if "ADMIN" in self.getPermissions(to): - self.cbSiteClone(to, address, root_inner_path, target_address, redirect) - else: - self.cmd( - "confirm", - [_["Clone site %s?"] % address, _["Clone"]], - lambda res: self.cbSiteClone(to, address, root_inner_path, target_address, redirect) - ) - - @flag.admin - @flag.no_multiuser def actionSiteSetLimit(self, to, size_limit): self.site.settings["size_limit"] = int(size_limit) self.site.saveSettings() - self.response(to, "ok") - self.site.updateWebsocket() + self.response(to, "Site size limit changed to %sMB" % size_limit) self.site.download(blind_includes=True) - @flag.admin - def actionSiteAdd(self, to, address): - site_manager = SiteManager.site_manager - if address in site_manager.sites: - return {"error": "Site already added"} - else: - if 
site_manager.need(address): - return "ok" - else: - return {"error": "Invalid address"} - - @flag.async_run - def actionSiteListModifiedFiles(self, to, content_inner_path="content.json"): - content = self.site.content_manager.contents.get(content_inner_path) - if not content: - return {"error": "content file not avaliable"} - - min_mtime = content.get("modified", 0) - site_path = self.site.storage.directory - modified_files = [] - - # Load cache if not signed since last modified check - if content.get("modified", 0) < self.site.settings["cache"].get("time_modified_files_check", 0): - min_mtime = self.site.settings["cache"].get("time_modified_files_check") - modified_files = self.site.settings["cache"].get("modified_files", []) - - inner_paths = [content_inner_path] + list(content.get("includes", {}).keys()) + list(content.get("files", {}).keys()) - - if len(inner_paths) > 100: - return {"error": "Too many files in content.json"} - - for relative_inner_path in inner_paths: - inner_path = helper.getDirname(content_inner_path) + relative_inner_path - try: - is_mtime_newer = os.path.getmtime(self.site.storage.getPath(inner_path)) > min_mtime + 1 - if is_mtime_newer: - if inner_path.endswith("content.json"): - is_modified = self.site.content_manager.isModified(inner_path) - else: - previous_size = content["files"][inner_path]["size"] - is_same_size = self.site.storage.getSize(inner_path) == previous_size - ext = inner_path.rsplit(".", 1)[-1] - is_text_file = ext in ["json", "txt", "html", "js", "css"] - if is_same_size: - if is_text_file: - is_modified = self.site.content_manager.isModified(inner_path) # Check sha512 hash - else: - is_modified = False - else: - is_modified = True - - # Check ran, modified back to original value, but in the cache - if not is_modified and inner_path in modified_files: - modified_files.remove(inner_path) - else: - is_modified = False - except Exception as err: - if not self.site.storage.isFile(inner_path): # File deleted - is_modified = 
True - else: - raise err - if is_modified and inner_path not in modified_files: - modified_files.append(inner_path) - - self.site.settings["cache"]["time_modified_files_check"] = time.time() - self.site.settings["cache"]["modified_files"] = modified_files - return {"modified_files": modified_files} - - @flag.admin - def actionSiteSetSettingsValue(self, to, key, value): - if key not in ["modified_files_notification"]: - return {"error": "Can't change this key"} - - self.site.settings[key] = value - - return "ok" - - def actionUserGetSettings(self, to): - settings = self.user.sites.get(self.site.address, {}).get("settings", {}) - self.response(to, settings) - - def actionUserSetSettings(self, to, settings): - self.user.setSiteSettings(self.site.address, settings) - self.response(to, "ok") - - def actionUserGetGlobalSettings(self, to): - settings = self.user.settings - self.response(to, settings) - - @flag.admin - def actionUserSetGlobalSettings(self, to, settings): - self.user.settings = settings - self.user.save() - self.response(to, "ok") - - @flag.admin - @flag.no_multiuser - def actionServerErrors(self, to): - return config.error_logger.lines - - @flag.admin - @flag.no_multiuser def actionServerUpdate(self, to): - def cbServerUpdate(res): - self.response(to, res) - if not res: - return False - for websocket in self.server.websockets: - websocket.cmd( - "notification", - ["info", _["Updating ZeroNet client, will be back in a few minutes..."], 20000] - ) - websocket.cmd("updating") + self.cmd("updating") + sys.modules["main"].update_after_shutdown = True + sys.modules["main"].file_server.stop() + sys.modules["main"].ui_server.stop() - import main - main.update_after_shutdown = True - main.restart_after_shutdown = True - SiteManager.site_manager.save() - main.file_server.stop() - main.ui_server.stop() - - self.cmd( - "confirm", - [_["Update ZeroNet client to latest version?"], _["Update"]], - cbServerUpdate - ) - - @flag.admin - @flag.async_run - @flag.no_multiuser 
def actionServerPortcheck(self, to): - import main - file_server = main.file_server - file_server.portCheck() - self.response(to, file_server.port_opened) - - @flag.admin - @flag.no_multiuser - def actionServerShutdown(self, to, restart=False): - import main - def cbServerShutdown(res): - self.response(to, res) - if not res: - return False - if restart: - main.restart_after_shutdown = True - main.file_server.stop() - main.ui_server.stop() - - if restart: - message = [_["Restart ZeroNet client?"], _["Restart"]] - else: - message = [_["Shut down ZeroNet client?"], _["Shut down"]] - self.cmd("confirm", message, cbServerShutdown) - - @flag.admin - @flag.no_multiuser - def actionServerShowdirectory(self, to, directory="backup", inner_path=""): - if self.request.env["REMOTE_ADDR"] != "127.0.0.1": - return self.response(to, {"error": "Only clients from 127.0.0.1 allowed to run this command"}) - - import webbrowser - if directory == "backup": - path = os.path.abspath(config.data_dir) - elif directory == "log": - path = os.path.abspath(config.log_dir) - elif directory == "site": - path = os.path.abspath(self.site.storage.getPath(helper.getDirname(inner_path))) - - if os.path.isdir(path): - self.log.debug("Opening: %s" % path) - webbrowser.open('file://' + path) - return self.response(to, "ok") - else: - return self.response(to, {"error": "Not a directory"}) - - @flag.admin - @flag.no_multiuser - def actionConfigSet(self, to, key, value): - import main - - self.log.debug("Changing config %s value to %r" % (key, value)) - if key not in config.keys_api_change_allowed: - self.response(to, {"error": "Forbidden: You cannot set this config key"}) - return - - if key == "open_browser": - if value not in ["default_browser", "False"]: - self.response(to, {"error": "Forbidden: Invalid value"}) - return - - # Remove empty lines from lists - if type(value) is list: - value = [line for line in value if line] - - config.saveValue(key, value) - - if key not in config.keys_restart_need: - 
if value is None: # Default value - setattr(config, key, config.parser.get_default(key)) - setattr(config.arguments, key, config.parser.get_default(key)) - else: - setattr(config, key, value) - setattr(config.arguments, key, value) - else: - config.need_restart = True - config.pending_changes[key] = value - - if key == "language": - import Translate - for translate in Translate.translates: - translate.setLanguage(value) - message = _["You have successfully changed the web interface's language!"] + "
    " - message += _["Due to the browser's caching, the full transformation could take some minute."] - self.cmd("notification", ["done", message, 10000]) - - if key == "tor_use_bridges": - if value is None: - value = False - else: - value = True - tor_manager = main.file_server.tor_manager - tor_manager.request("SETCONF UseBridges=%i" % value) - - if key == "trackers_file": - config.loadTrackersFile() - - if key == "log_level": - logging.getLogger('').setLevel(logging.getLevelName(config.log_level)) - - if key == "ip_external": - gevent.spawn(main.file_server.portCheck) - - if key == "offline": - if value: - main.file_server.closeConnections() - else: - gevent.spawn(main.file_server.checkSites, check_files=False, force_port_check=True) - - self.response(to, "ok") + sys.modules["main"].file_server.port_opened = None + res = sys.modules["main"].file_server.openport() + self.response(to, res) diff --git a/src/Ui/__init__.py b/src/Ui/__init__.py index dcb8896d..9982dc4f 100644 --- a/src/Ui/__init__.py +++ b/src/Ui/__init__.py @@ -1,3 +1,3 @@ -from .UiServer import UiServer -from .UiRequest import UiRequest -from .UiWebsocket import UiWebsocket \ No newline at end of file +from UiServer import UiServer +from UiRequest import UiRequest +from UiWebsocket import UiWebsocket \ No newline at end of file diff --git a/src/Ui/media/Fixbutton.coffee b/src/Ui/media/Fixbutton.coffee index 954d2b56..9e644a4e 100644 --- a/src/Ui/media/Fixbutton.coffee +++ b/src/Ui/media/Fixbutton.coffee @@ -11,7 +11,7 @@ class Fixbutton return true $(".fixbutton-bg").stop().animate({"scale": 0.6}, 300, "easeOutCubic") $(".fixbutton-burger").stop().animate({"opacity": 0, "left": -20}, 300, "easeOutCubic") - $(".fixbutton-text").stop().animate({"opacity": 0.9, "left": 0}, 300, "easeOutBack") + $(".fixbutton-text").stop().animate({"opacity": 1, "left": 0}, 300, "easeOutBack") ###$(".fixbutton-bg").on "click", -> diff --git a/src/Ui/media/Infopanel.coffee b/src/Ui/media/Infopanel.coffee deleted file 
mode 100644 index 3a490364..00000000 --- a/src/Ui/media/Infopanel.coffee +++ /dev/null @@ -1,57 +0,0 @@ -class Infopanel - constructor: (@elem) -> - @visible = false - - show: (closed=false) => - @elem.parent().addClass("visible") - if closed - @close() - else - @open() - - unfold: => - @elem.toggleClass("unfolded") - return false - - updateEvents: => - @elem.off("click") - @elem.find(".close").off("click") - @elem.find(".line").off("click") - - @elem.find(".line").on("click", @unfold) - - if @elem.hasClass("closed") - @elem.on "click", => - @onOpened() - @open() - else - @elem.find(".close").on "click", => - @onClosed() - @close() - - hide: => - @elem.parent().removeClass("visible") - - close: => - @elem.addClass("closed") - @updateEvents() - return false - - open: => - @elem.removeClass("closed") - @updateEvents() - return false - - setTitle: (line1, line2) => - @elem.find(".line-1").text(line1) - @elem.find(".line-2").text(line2) - - setClosedNum: (num) => - @elem.find(".closed-num").text(num) - - setAction: (title, func) => - @elem.find(".button").text(title).off("click").on("click", func) - - - -window.Infopanel = Infopanel diff --git a/src/Ui/media/Loading.coffee b/src/Ui/media/Loading.coffee index 8e35ce66..a18ae4f9 100644 --- a/src/Ui/media/Loading.coffee +++ b/src/Ui/media/Loading.coffee @@ -1,22 +1,14 @@ class Loading - constructor: (@wrapper) -> + constructor: -> if window.show_loadingscreen then @showScreen() - @timer_hide = null - @timer_set = null + setProgress: (percent) -> - if @timer_hide - clearInterval @timer_hide - @timer_set = RateLimit 500, -> - $(".progressbar").css("transform": "scaleX(#{parseInt(percent*100)/100})").css("opacity", "1").css("display", "block") + $(".progressbar").css("width", percent*100+"%").css("opacity", "1").css("display", "block") hideProgress: -> - @log "hideProgress" - if @timer_set - clearInterval @timer_set - @timer_hide = setTimeout ( => - $(".progressbar").css("transform": "scaleX(1)").css("opacity", 
"0").hideLater(1000) - ), 300 + console.log "hideProgress" + $(".progressbar").css("width", "100%").css("opacity", "0").hideLater(1000) showScreen: -> @@ -26,37 +18,20 @@ class Loading showTooLarge: (site_info) -> - @log "Displaying large site confirmation" if $(".console .button-setlimit").length == 0 # Not displaying it yet line = @printLine("Site size: #{parseInt(site_info.settings.size/1024/1024)}MB is larger than default allowed #{parseInt(site_info.size_limit)}MB", "warning") - button = $("" + "Open site and set size limit to #{site_info.next_size_limit}MB" + "") - button.on "click", => - button.addClass("loading") - return @wrapper.setSizeLimit(site_info.next_size_limit) + button = $("Open site and set size limit to #{site_info.next_size_limit}MB") + button.on "click", (-> return window.wrapper.setSizeLimit(site_info.next_size_limit) ) line.after(button) setTimeout (=> @printLine('Ready.') ), 100 - showTrackerTorBridge: (server_info) -> - if $(".console .button-settrackerbridge").length == 0 and not server_info.tor_use_meek_bridges - line = @printLine("Tracker connection error detected.", "error") - button = $("" + "Use Tor meek bridges for tracker connections" + "") - button.on "click", => - button.addClass("loading") - @wrapper.ws.cmd "configSet", ["tor_use_bridges", ""] - @wrapper.ws.cmd "configSet", ["trackers_proxy", "tor"] - @wrapper.ws.cmd "siteUpdate", {address: @wrapper.site_info.address, announce: true} - @wrapper.reloadIframe() - return false - line.after(button) - if not server_info.tor_has_meek_bridges - button.addClass("disabled") - @printLine("No meek bridge support in your client, please download the latest bundle.", "warning") + # We dont need loadingscreen anymore hideScreen: -> - @log "hideScreen" + console.log "hideScreen" if not $(".loadingscreen").hasClass("done") # Only if its not animating already if @screen_visible # Hide with animate $(".loadingscreen").addClass("done").removeLater(2000) @@ -84,8 +59,6 @@ class Loading if type == 
"warning" then line.addClass("console-warning") return line - log: (args...) -> - console.log "[Loading]", args... -window.Loading = Loading +window.Loading = Loading \ No newline at end of file diff --git a/src/Ui/media/Notifications.coffee b/src/Ui/media/Notifications.coffee index 35d949f3..1a7f94fa 100644 --- a/src/Ui/media/Notifications.coffee +++ b/src/Ui/media/Notifications.coffee @@ -13,7 +13,7 @@ class Notifications add: (id, type, body, timeout=0) -> - id = id.replace /[^A-Za-z0-9-]/g, "" + id = id.replace /[^A-Za-z0-9]/g, "" # Close notifications with same id for elem in $(".notification-#{id}") @close $(elem) @@ -21,23 +21,19 @@ class Notifications # Create element elem = $(".notification.template", @elem).clone().removeClass("template") elem.addClass("notification-#{type}").addClass("notification-#{id}") - if type == "progress" - elem.addClass("notification-done") # Update text if type == "error" $(".notification-icon", elem).html("!") else if type == "done" $(".notification-icon", elem).html("
    ") - else if type == "progress" - $(".notification-icon", elem).html("
    ") else if type == "ask" $(".notification-icon", elem).html("?") else $(".notification-icon", elem).html("i") if typeof(body) == "string" - $(".body", elem).html("
    "+body+"
    ") + $(".body", elem).html(""+body+"") else $(".body", elem).html("").append(body) @@ -51,13 +47,12 @@ class Notifications ), timeout # Animate - width = Math.min(elem.outerWidth() + 70, 580) + width = elem.outerWidth() if not timeout then width += 20 # Add space for close button if elem.outerHeight() > 55 then elem.addClass("long") elem.css({"width": "50px", "transform": "scale(0.01)"}) elem.animate({"scale": 1}, 800, "easeOutElastic") elem.animate({"width": width}, 700, "easeInOutCubic") - $(".body", elem).css("width": (width - 50)) $(".body", elem).cssLater("box-shadow", "0px 0px 5px rgba(0,0,0,0.1)", 1000) # Close button or Confirm button @@ -69,13 +64,6 @@ class Notifications $(".select", elem).on "click", => @close elem - # Input enter - $("input", elem).on "keyup", (e) => - if e.keyCode == 13 - @close elem - - return elem - close: (elem) -> elem.stop().animate {"width": 0, "opacity": 0}, 700, "easeInOutCubic" @@ -86,4 +74,4 @@ class Notifications console.log "[Notifications]", args... -window.Notifications = Notifications +window.Notifications = Notifications \ No newline at end of file diff --git a/src/Ui/media/Wrapper.coffee b/src/Ui/media/Wrapper.coffee index 1b98855e..708756d4 100644 --- a/src/Ui/media/Wrapper.coffee +++ b/src/Ui/media/Wrapper.coffee @@ -2,13 +2,8 @@ class Wrapper constructor: (ws_url) -> @log "Created!" 
- @loading = new Loading(@) + @loading = new Loading() @notifications = new Notifications($(".notifications")) - @infopanel = new Infopanel($(".infopanel")) - @infopanel.onClosed = => - @ws.cmd("siteSetSettingsValue", ["modified_files_notification", false]) - @infopanel.onOpened = => - @ws.cmd("siteSetSettingsValue", ["modified_files_notification", true]) @fixbutton = new Fixbutton() window.addEventListener("message", @onMessageInner, false) @@ -21,52 +16,27 @@ class Wrapper @ws.connect() @ws_error = null # Ws error message - @next_cmd_message_id = -1 - @site_info = null # Hold latest site info - @server_info = null # Hold latest server info @event_site_info = $.Deferred() # Event when site_info received @inner_loaded = false # If iframe loaded or not @inner_ready = false # Inner frame ready to receive messages @wrapperWsInited = false # Wrapper notified on websocket open @site_error = null # Latest failed file download @address = null - @opener_tested = false - @announcer_line = null - @web_notifications = {} - @is_title_changed = false - @allowed_event_constructors = [window.MouseEvent, window.KeyboardEvent, window.PointerEvent] # Allowed event constructors - - window.onload = @onPageLoad # On iframe loaded - window.onhashchange = (e) => # On hash change + window.onload = @onLoad # On iframe loaded + $(window).on "hashchange", => # On hash change @log "Hashchange", window.location.hash if window.location.hash src = $("#inner-iframe").attr("src").replace(/#.*/, "")+window.location.hash $("#inner-iframe").attr("src", src) - window.onpopstate = (e) => - @sendInner {"cmd": "wrapperPopState", "params": {"href": document.location.href, "state": e.state}} - $("#inner-iframe").focus() - verifyEvent: (allowed_target, e) => - if not e.originalEvent.isTrusted - throw "Event not trusted" - - if e.originalEvent.constructor not in @allowed_event_constructors - throw "Invalid event constructor: #{e.constructor} not in #{JSON.stringify(@allowed_event_constructors)}" - - if 
e.originalEvent.currentTarget != allowed_target[0] - throw "Invalid event target: #{e.originalEvent.currentTarget} != #{allowed_target[0]}" - # Incoming message from UiServer websocket onMessageWebsocket: (e) => message = JSON.parse(e.data) - @handleMessageWebsocket(message) - - handleMessageWebsocket: (message) => cmd = message.cmd if cmd == "response" if @ws.waiting_cb[message.to]? # We are waiting for response @@ -75,89 +45,36 @@ class Wrapper @sendInner message # Pass message to inner frame else if cmd == "notification" # Display notification type = message.params[0] - id = "notification-ws-#{message.id}" - if "-" in message.params[0] # - in first param: message id defined + id = "notification-#{message.id}" + if "-" in message.params[0] # - in first param: message id definied [id, type] = message.params[0].split("-") @notifications.add(id, type, message.params[1], message.params[2]) - else if cmd == "progress" # Display notification - @actionProgress(message) else if cmd == "prompt" # Prompt input - @displayPrompt message.params[0], message.params[1], message.params[2], message.params[3], (res) => - @ws.response message.id, res - else if cmd == "confirm" # Confirm action - @displayConfirm message.params[0], message.params[1], (res) => + @displayPrompt message.params[0], message.params[1], message.params[2], (res) => @ws.response message.id, res else if cmd == "setSiteInfo" @sendInner message # Pass to inner frame if message.params.address == @address # Current page @setSiteInfo message.params - @updateProgress message.params - else if cmd == "setAnnouncerInfo" - @sendInner message # Pass to inner frame - if message.params.address == @address # Current page - @setAnnouncerInfo message.params - @updateProgress message.params else if cmd == "error" @notifications.add("notification-#{message.id}", "error", message.params, 0) else if cmd == "updating" # Close connection - @log "Updating: Closing websocket" @ws.ws.close() @ws.onCloseWebsocket(null, 4000) - else if 
cmd == "redirect" - window.top.location = message.params - else if cmd == "injectHtml" - $("body").append(message.params) - else if cmd == "injectScript" - script_tag = $(" -

    ZeroNet requires JavaScript support.

    If you use NoScript/Tor browser: Click on toolbar icon with the notification and choose "Temp. TRUSTED" for 127.0.0.1. -
    - -
    @@ -38,31 +31,20 @@ else if (window.opener && window.opener.location.toString()) {
    -
    +
    0
    - +
    +
    ! Test notification×
    - -
    -
    - 8 -
    - 8 modified files
    content.json, data.json -
    - Sign & Publish - × -
    -
    - Config
    @@ -72,32 +54,20 @@ else if (window.opener && window.opener.location.toString()) { - + - - - + + diff --git a/src/User/User.py b/src/User/User.py index dbcfc56f..95c0661d 100644 --- a/src/User/User.py +++ b/src/User/User.py @@ -1,16 +1,11 @@ import logging import json import time -import binascii -import gevent - -import util from Crypt import CryptBitcoin from Plugin import PluginManager from Config import config from util import helper -from Debug import Debug @PluginManager.acceptPlugins @@ -27,15 +22,11 @@ class User(object): self.master_address = CryptBitcoin.privatekeyToAddress(self.master_seed) self.sites = data.get("sites", {}) self.certs = data.get("certs", {}) - self.settings = data.get("settings", {}) - self.delayed_save_thread = None self.log = logging.getLogger("User:%s" % self.master_address) # Save to data/users.json - @util.Noparallel(queue=True, ignore_class=True) def save(self): - s = time.time() users = json.load(open("%s/users.json" % config.data_dir)) if self.master_address not in users: users[self.master_address] = {} # Create if not exist @@ -44,30 +35,11 @@ class User(object): user_data["master_seed"] = self.master_seed user_data["sites"] = self.sites user_data["certs"] = self.certs - user_data["settings"] = self.settings - helper.atomicWrite("%s/users.json" % config.data_dir, helper.jsonDumps(users).encode("utf8")) - self.log.debug("Saved in %.3fs" % (time.time() - s)) - self.delayed_save_thread = None - - def saveDelayed(self): - if not self.delayed_save_thread: - self.delayed_save_thread = gevent.spawn_later(5, self.save) + helper.atomicWrite("%s/users.json" % config.data_dir, json.dumps(users, indent=2, sort_keys=True)) + self.log.debug("Saved") def getAddressAuthIndex(self, address): - return int(binascii.hexlify(address.encode()), 16) - - @util.Noparallel() - def generateAuthAddress(self, address): - s = time.time() - address_id = self.getAddressAuthIndex(address) # Convert site address to int - auth_privatekey = 
CryptBitcoin.hdPrivatekey(self.master_seed, address_id) - self.sites[address] = { - "auth_address": CryptBitcoin.privatekeyToAddress(auth_privatekey), - "auth_privatekey": auth_privatekey - } - self.saveDelayed() - self.log.debug("Added new site: %s in %.3fs" % (address, time.time() - s)) - return self.sites[address] + return int(address.encode("hex"), 16) # Get user site data # Return: {"auth_address": "xxx", "auth_privatekey": "xxx"} @@ -75,21 +47,23 @@ class User(object): if address not in self.sites: # Generate new BIP32 child key based on site address if not create: return {"auth_address": None, "auth_privatekey": None} # Dont create user yet - self.generateAuthAddress(address) + s = time.time() + address_id = self.getAddressAuthIndex(address) # Convert site address to int + auth_privatekey = CryptBitcoin.hdPrivatekey(self.master_seed, address_id) + self.sites[address] = { + "auth_address": CryptBitcoin.privatekeyToAddress(auth_privatekey), + "auth_privatekey": auth_privatekey + } + self.save() + self.log.debug("Added new site: %s in %.3fs" % (address, time.time() - s)) return self.sites[address] def deleteSiteData(self, address): if address in self.sites: del(self.sites[address]) - self.saveDelayed() + self.save() self.log.debug("Deleted site: %s" % address) - def setSiteSettings(self, address, settings): - site_data = self.getSiteData(address) - site_data["settings"] = settings - self.saveDelayed() - return site_data - # Get data for a new, unique site # Return: [site_address, bip32_index, {"auth_address": "xxx", "auth_privatekey": "xxx", "privatekey": "xxx"}] def getNewSiteData(self): @@ -123,8 +97,9 @@ class User(object): # Add cert for the user def addCert(self, auth_address, domain, auth_type, auth_user_name, cert_sign): + domain = domain.lower() # Find privatekey by auth address - auth_privatekey = [site["auth_privatekey"] for site in list(self.sites.values()) if site["auth_address"] == auth_address][0] + auth_privatekey = [site["auth_privatekey"] for 
site in self.sites.values() if site["auth_address"] == auth_address][0] cert_node = { "auth_address": auth_address, "auth_privatekey": auth_privatekey, @@ -134,7 +109,10 @@ class User(object): } # Check if we have already cert for that domain and its not the same if self.certs.get(domain) and self.certs[domain] != cert_node: - return False + raise Exception( + "You already have certificate for this domain: %s/%s@%s" % + (self.certs[domain]["auth_type"], self.certs[domain]["auth_user_name"], domain) + ) elif self.certs.get(domain) == cert_node: # Same, not updated return None else: # Not exist yet, add @@ -142,19 +120,13 @@ class User(object): self.save() return True - # Remove cert from user - def deleteCert(self, domain): - del self.certs[domain] - - # Set active cert for a site def setCert(self, address, domain): site_data = self.getSiteData(address) if domain: site_data["cert"] = domain else: - if "cert" in site_data: - del site_data["cert"] - self.saveDelayed() + del site_data["cert"] + self.save() return site_data # Get cert for the site address diff --git a/src/User/UserManager.py b/src/User/UserManager.py index 067734a6..dff7ece1 100644 --- a/src/User/UserManager.py +++ b/src/User/UserManager.py @@ -1,10 +1,9 @@ # Included modules import json import logging -import time # ZeroNet Modules -from .User import User +from User import User from Plugin import PluginManager from Config import config @@ -13,7 +12,6 @@ from Config import config class UserManager(object): def __init__(self): self.users = {} - self.log = logging.getLogger("UserManager") # Load all user from data/users.json def load(self): @@ -22,15 +20,8 @@ class UserManager(object): user_found = [] added = 0 - s = time.time() # Load new users - try: - json_path = "%s/users.json" % config.data_dir - data = json.load(open(json_path)) - except Exception as err: - raise Exception("Unable to load %s: %s" % (json_path, err)) - - for master_address, data in list(data.items()): + for master_address, data in 
json.load(open("%s/users.json" % config.data_dir)).items(): if master_address not in self.users: user = User(master_address, data=data) self.users[master_address] = user @@ -38,23 +29,22 @@ class UserManager(object): user_found.append(master_address) # Remove deleted adresses - for master_address in list(self.users.keys()): + for master_address in self.users.keys(): if master_address not in user_found: del(self.users[master_address]) - self.log.debug("Removed user: %s" % master_address) + logging.debug("Removed user: %s" % master_address) if added: - self.log.debug("Added %s users in %.3fs" % (added, time.time() - s)) + logging.debug("UserManager added %s users" % added) # Create new user # Return: User def create(self, master_address=None, master_seed=None): - self.list() # Load the users if it's not loaded yet user = User(master_address, master_seed) - self.log.debug("Created user: %s" % user.master_address) + logging.debug("Created user: %s" % user.master_address) if user.master_address: # If successfully created self.users[user.master_address] = user - user.saveDelayed() + user.save() return user # List all users from data/users.json @@ -69,9 +59,24 @@ class UserManager(object): def get(self, master_address=None): users = self.list() if users: - return list(users.values())[0] # Single user mode, always return the first + return users.values()[0] # Single user mode, always return the first else: return None user_manager = UserManager() # Singleton + + +# Debug: Reload User.py +def reloadModule(): + return "Not used" + + import imp + global User, UserManager, user_manager + User = imp.load_source("User", "src/User/User.py").User # Reload source + # module = imp.load_source("UserManager", "src/User/UserManager.py") # Reload module + # UserManager = module.UserManager + # user_manager = module.user_manager + # Reload users + user_manager = UserManager() + user_manager.load() diff --git a/src/User/__init__.py b/src/User/__init__.py index 4db9149e..8d569979 100644 
--- a/src/User/__init__.py +++ b/src/User/__init__.py @@ -1 +1 @@ -from .User import User +from User import User diff --git a/src/Worker/Worker.py b/src/Worker/Worker.py index b7111ba1..1d0e9fe2 100644 --- a/src/Worker/Worker.py +++ b/src/Worker/Worker.py @@ -1,23 +1,8 @@ import time import gevent -import gevent.lock from Debug import Debug -from Config import config -from Content.ContentManager import VerifyError - - -class WorkerDownloadError(Exception): - pass - - -class WorkerIOError(Exception): - pass - - -class WorkerStop(Exception): - pass class Worker(object): @@ -29,8 +14,6 @@ class Worker(object): self.key = None self.running = False self.thread = None - self.num_downloaded = 0 - self.num_failed = 0 def __str__(self): return "Worker %s %s" % (self.manager.site.address_short, self.key) @@ -38,181 +21,63 @@ class Worker(object): def __repr__(self): return "<%s>" % self.__str__() - def waitForTask(self, task, timeout): # Wait for other workers to finish the task - for sleep_i in range(1, timeout * 10): - time.sleep(0.1) - if task["done"] or task["workers_num"] == 0: - if config.verbose: - self.manager.log.debug("%s: %s, picked task free after %ss sleep. (done: %s)" % ( - self.key, task["inner_path"], 0.1 * sleep_i, task["done"] - )) - break - - if sleep_i % 10 == 0: - workers = self.manager.findWorkers(task) - if not workers or not workers[0].peer.connection: - break - worker_idle = time.time() - workers[0].peer.connection.last_recv_time - if worker_idle > 1: - if config.verbose: - self.manager.log.debug("%s: %s, worker %s seems idle, picked up task after %ss sleep. 
(done: %s)" % ( - self.key, task["inner_path"], workers[0].key, 0.1 * sleep_i, task["done"] - )) - break - return True - - def pickTask(self): # Find and select a new task for the worker - task = self.manager.getTask(self.peer) - if not task: # No more task - time.sleep(0.1) # Wait a bit for new tasks - task = self.manager.getTask(self.peer) - if not task: # Still no task, stop it - stats = "downloaded files: %s, failed: %s" % (self.num_downloaded, self.num_failed) - self.manager.log.debug("%s: No task found, stopping (%s)" % (self.key, stats)) - return False - - if not task["time_started"]: - task["time_started"] = time.time() # Task started now - - if task["workers_num"] > 0: # Wait a bit if someone already working on it - if task["peers"]: # It's an update - timeout = 3 - else: - timeout = 1 - - if task["size"] > 100 * 1024 * 1024: - timeout = timeout * 2 - - if config.verbose: - self.manager.log.debug("%s: Someone already working on %s (pri: %s), sleeping %s sec..." % ( - self.key, task["inner_path"], task["priority"], timeout - )) - - self.waitForTask(task, timeout) - return task - - def downloadTask(self, task): - try: - buff = self.peer.getFile(task["site"].address, task["inner_path"], task["size"]) - except Exception as err: - self.manager.log.debug("%s: getFile error: %s" % (self.key, err)) - raise WorkerDownloadError(str(err)) - - if not buff: - raise WorkerDownloadError("No response") - - return buff - - def getTaskLock(self, task): - if task["lock"] is None: - task["lock"] = gevent.lock.Semaphore() - return task["lock"] - - def writeTask(self, task, buff): - buff.seek(0) - try: - task["site"].storage.write(task["inner_path"], buff) - except Exception as err: - if type(err) == Debug.Notify: - self.manager.log.debug("%s: Write aborted: %s (%s: %s)" % (self.key, task["inner_path"], type(err), err)) - else: - self.manager.log.error("%s: Error writing: %s (%s: %s)" % (self.key, task["inner_path"], type(err), err)) - raise WorkerIOError(str(err)) - - def 
onTaskVerifyFail(self, task, error_message): - self.num_failed += 1 - if self.manager.started_task_num < 50 or config.verbose: - self.manager.log.debug( - "%s: Verify failed: %s, error: %s, failed peers: %s, workers: %s" % - (self.key, task["inner_path"], error_message, len(task["failed"]), task["workers_num"]) - ) - task["failed"].append(self.peer) - self.peer.hash_failed += 1 - if self.peer.hash_failed >= max(len(self.manager.tasks), 3) or self.peer.connection_error > 10: - # Broken peer: More fails than tasks number but atleast 3 - raise WorkerStop( - "Too many errors (hash failed: %s, connection error: %s)" % - (self.peer.hash_failed, self.peer.connection_error) - ) - - def handleTask(self, task): - download_err = write_err = False - - write_lock = None - try: - buff = self.downloadTask(task) - - if task["done"] is True: # Task done, try to find new one - return None - - if self.running is False: # Worker no longer needed or got killed - self.manager.log.debug("%s: No longer needed, returning: %s" % (self.key, task["inner_path"])) - raise WorkerStop("Running got disabled") - - write_lock = self.getTaskLock(task) - write_lock.acquire() - if task["site"].content_manager.verifyFile(task["inner_path"], buff) is None: - is_same = True - else: - is_same = False - is_valid = True - except (WorkerDownloadError, VerifyError) as err: - download_err = err - is_valid = False - is_same = False - - if is_valid and not is_same: - if self.manager.started_task_num < 50 or task["priority"] > 10 or config.verbose: - self.manager.log.debug("%s: Verify correct: %s" % (self.key, task["inner_path"])) - try: - self.writeTask(task, buff) - except WorkerIOError as err: - write_err = err - - if not task["done"]: - if write_err: - self.manager.failTask(task, reason="Write error") - self.num_failed += 1 - self.manager.log.error("%s: Error writing %s: %s" % (self.key, task["inner_path"], write_err)) - elif is_valid: - self.manager.doneTask(task) - self.num_downloaded += 1 - - if write_lock 
is not None and write_lock.locked(): - write_lock.release() - - if not is_valid: - self.onTaskVerifyFail(task, download_err) - time.sleep(1) - return False - - return True - + # Downloader thread def downloader(self): self.peer.hash_failed = 0 # Reset hash error counter while self.running: # Try to pickup free file download task - task = self.pickTask() - - if not task: + task = self.manager.getTask(self.peer) + if not task: # Die, no more task + self.manager.log.debug("%s: No task found, stopping" % self.key) break + if not task["time_started"]: + task["time_started"] = time.time() # Task started now - if task["done"]: - continue - - self.task = task - - self.manager.addTaskWorker(task, self) - - try: - success = self.handleTask(task) - except WorkerStop as err: - self.manager.log.debug("%s: Worker stopped: %s" % (self.key, err)) - self.manager.removeTaskWorker(task, self) - break - - self.manager.removeTaskWorker(task, self) + if task["workers_num"] > 0: # Wait a bit if someone already working on it + self.manager.log.debug("%s: Someone already working on %s, sleeping 1 sec..." 
% (self.key, task["inner_path"])) + time.sleep(1) + self.manager.log.debug("%s: %s, task done after sleep: %s" % (self.key, task["inner_path"], task["done"])) + if task["done"] is False: + self.task = task + site = task["site"] + task["workers_num"] += 1 + try: + buff = self.peer.getFile(site.address, task["inner_path"]) + except Exception, err: + self.manager.log.debug("%s: getFile error: %s" % (self.key, err)) + buff = None + if self.running is False: # Worker no longer needed or got killed + self.manager.log.debug("%s: No longer needed, returning: %s" % (self.key, task["inner_path"])) + break + if task["done"] is True: # Task done, try to find new one + continue + if buff: # Download ok + correct = site.content_manager.verifyFile(task["inner_path"], buff) + else: # Download error + correct = False + if correct is True or correct is None: # Hash ok or same file + self.manager.log.debug("%s: Hash correct: %s" % (self.key, task["inner_path"])) + if correct is True and task["done"] is False: # Save if changed and task not done yet + buff.seek(0) + site.storage.write(task["inner_path"], buff) + if task["done"] is False: + self.manager.doneTask(task) + task["workers_num"] -= 1 + self.task = None + else: # Hash failed + self.manager.log.debug( + "%s: Hash failed: %s, failed peers: %s" % + (self.key, task["inner_path"], len(task["failed"])) + ) + task["failed"].append(self.peer) + self.task = None + self.peer.hash_failed += 1 + if self.peer.hash_failed >= max(len(self.manager.tasks), 3) or self.peer.connection_error > 10: + # Broken peer: More fails than tasks number but atleast 3 + break + task["workers_num"] -= 1 + time.sleep(1) self.peer.onWorkerDone() self.running = False self.manager.removeWorker(self) @@ -223,17 +88,17 @@ class Worker(object): self.thread = gevent.spawn(self.downloader) # Skip current task - def skip(self, reason="Unknown"): - self.manager.log.debug("%s: Force skipping (reason: %s)" % (self.key, reason)) + def skip(self): + 
self.manager.log.debug("%s: Force skipping" % self.key) if self.thread: - self.thread.kill(exception=Debug.createNotifyType("Worker skipping (reason: %s)" % reason)) + self.thread.kill(exception=Debug.Notify("Worker stopped")) self.start() # Force stop the worker - def stop(self, reason="Unknown"): - self.manager.log.debug("%s: Force stopping (reason: %s)" % (self.key, reason)) + def stop(self): + self.manager.log.debug("%s: Force stopping" % self.key) self.running = False if self.thread: - self.thread.kill(exception=Debug.createNotifyType("Worker stopped (reason: %s)" % reason)) + self.thread.kill(exception=Debug.Notify("Worker stopped")) del self.thread self.manager.removeWorker(self) diff --git a/src/Worker/WorkerManager.py b/src/Worker/WorkerManager.py index f68e8410..379ac069 100644 --- a/src/Worker/WorkerManager.py +++ b/src/Worker/WorkerManager.py @@ -1,35 +1,29 @@ import time import logging +import random import collections import gevent -from .Worker import Worker -from .WorkerTaskManager import WorkerTaskManager -from Config import config +from Worker import Worker from util import helper -from Plugin import PluginManager -from Debug.DebugLock import DebugLock import util +MAX_WORKERS = 10 # Max concurent workers -@PluginManager.acceptPlugins -class WorkerManager(object): + +class WorkerManager: def __init__(self, site): self.site = site self.workers = {} # Key: ip:port, Value: Worker.Worker - self.tasks = WorkerTaskManager() - self.next_task_id = 1 - self.lock_add_task = DebugLock(name="Lock AddTask:%s" % self.site.address_short) - # {"id": 1, "evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, "optional_hash_id": None, - # "time_started": None, "time_added": time.time(), "peers": peers, "priority": 0, "failed": peer_ids, "lock": None or gevent.lock.RLock} + self.tasks = [] + # {"evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, "optional_hash_id": None, + # "time_started": 
None, "time_added": time.time(), "peers": peers, "priority": 0, "failed": peer_ids} self.started_task_num = 0 # Last added task num - self.asked_peers = [] self.running = True - self.time_task_added = 0 self.log = logging.getLogger("WorkerManager:%s" % self.site.address_short) - self.site.greenlet_manager.spawn(self.checkTasks) + self.process_taskchecker = gevent.spawn(self.checkTasks) def __str__(self): return "WorkerManager %s" % self.site.address_short @@ -41,82 +35,76 @@ class WorkerManager(object): def checkTasks(self): while self.running: tasks = task = worker = workers = None # Cleanup local variables - announced = False time.sleep(15) # Check every 15 sec # Clean up workers - for worker in list(self.workers.values()): + for worker in self.workers.values(): if worker.task and worker.task["done"]: - worker.skip(reason="Task done") # Stop workers with task done + worker.skip() # Stop workers with task done if not self.tasks: continue tasks = self.tasks[:] # Copy it so removing elements wont cause any problem - num_tasks_started = len([task for task in tasks if task["time_started"]]) - - self.log.debug( - "Tasks: %s, started: %s, bad files: %s, total started: %s" % - (len(tasks), num_tasks_started, len(self.site.bad_files), self.started_task_num) - ) - for task in tasks: - if task["time_started"] and time.time() >= task["time_started"] + 60: - self.log.debug("Timeout, Skipping: %s" % task) # Task taking too long time, skip it + if task["time_started"] and time.time() >= task["time_started"] + 60: # Task taking too long time, skip it + self.log.debug("Timeout, Skipping: %s" % task) # Skip to next file workers workers = self.findWorkers(task) if workers: for worker in workers: - worker.skip(reason="Task timeout") + worker.skip() else: - self.failTask(task, reason="No workers") - + self.failTask(task) elif time.time() >= task["time_added"] + 60 and not self.workers: # No workers left - self.failTask(task, reason="Timeout") + self.log.debug("Timeout, Cleanup task: 
%s" % task) + # Remove task + self.failTask(task) elif (task["time_started"] and time.time() >= task["time_started"] + 15) or not self.workers: - # Find more workers: Task started more than 15 sec ago or no workers + # Task started more than 15 sec ago or no workers workers = self.findWorkers(task) self.log.debug( - "Slow task: %s, (workers: %s, optional_hash_id: %s, peers: %s, failed: %s, asked: %s)" % - ( - task["inner_path"], len(workers), task["optional_hash_id"], - len(task["peers"] or []), len(task["failed"]), len(self.asked_peers) - ) + "Task taking more than 15 secs, workers: %s find more peers: %s" % + (len(workers), task["inner_path"]) ) - if not announced and task["site"].isAddedRecently(): - task["site"].announce(mode="more") # Find more peers - announced = True + task["site"].announce(num=1) # Find more peers if task["optional_hash_id"]: - if self.workers: - if not task["time_started"]: - ask_limit = 20 - else: - ask_limit = max(10, time.time() - task["time_started"]) - if len(self.asked_peers) < ask_limit and len(task["peers"] or []) <= len(task["failed"]) * 2: - # Re-search for high priority - self.startFindOptional(find_more=True) - if task["peers"]: - peers_try = [peer for peer in task["peers"] if peer not in task["failed"] and peer not in workers] - if peers_try: - self.startWorkers(peers_try, force_num=5, reason="Task checker (optional, has peers)") - else: - self.startFindOptional(find_more=True) - else: - self.startFindOptional(find_more=True) + self.startFindOptional() else: if task["peers"]: # Release the peer lock self.log.debug("Task peer lock release: %s" % task["inner_path"]) task["peers"] = [] - self.startWorkers(reason="Task checker") - - if len(self.tasks) > len(self.workers) * 2 and len(self.workers) < self.getMaxWorkers(): - self.startWorkers(reason="Task checker (need more workers)") + self.startWorkers() + break # One reannounce per loop self.log.debug("checkTasks stopped running") + # Tasks sorted by this + def taskSorter(self, 
task): + inner_path = task["inner_path"] + if inner_path == "content.json": + return 9999 # Content.json always prority + if inner_path == "index.html": + return 9998 # index.html also important + priority = task["priority"] + if "-default" in inner_path: + priority -= 4 # Default files are cloning not important + elif inner_path.endswith(".css"): + priority += 5 # boost css files priority + elif inner_path.endswith(".js"): + priority += 4 # boost js files priority + elif inner_path.endswith("dbschema.json"): + priority += 3 # boost database specification + elif inner_path.endswith("content.json"): + priority += 1 # boost included content.json files priority a bit + elif inner_path.endswith(".json"): + priority += 2 # boost data json files priority more + return priority - task["workers_num"] * 5 # Prefer more priority and less workers + # Returns the next free or less worked task def getTask(self, peer): + self.tasks.sort(key=self.taskSorter, reverse=True) # Sort tasks by priority and worker numbers for task in self.tasks: # Find a task if task["peers"] and peer not in task["peers"]: continue # This peer not allowed to pick this task @@ -124,182 +112,125 @@ class WorkerManager(object): continue # Peer already tried to solve this, but failed if task["optional_hash_id"] and task["peers"] is None: continue # No peers found yet for the optional task - if task["done"]: - continue return task - def removeSolvedFileTasks(self, mark_as_good=True): + def removeGoodFileTasks(self): for task in self.tasks[:]: if task["inner_path"] not in self.site.bad_files: - self.log.debug("No longer in bad_files, marking as %s: %s" % (mark_as_good, task["inner_path"])) + self.log.debug("No longer in bad_files, marking as good: %s" % task["inner_path"]) task["done"] = True - task["evt"].set(mark_as_good) + task["evt"].set(True) self.tasks.remove(task) if not self.tasks: self.started_task_num = 0 self.site.updateWebsocket() + # New peers added to site def onPeers(self): - 
self.startWorkers(reason="More peers found") - - def getMaxWorkers(self): - if len(self.tasks) > 50: - return config.workers * 3 - else: - return config.workers + self.startWorkers() # Add new worker - def addWorker(self, peer, multiplexing=False, force=False): + def addWorker(self, peer): key = peer.key - if len(self.workers) > self.getMaxWorkers() and not force: - return False - if multiplexing: # Add even if we already have worker for this peer - key = "%s/%s" % (key, len(self.workers)) - if key not in self.workers: + if key not in self.workers and len(self.workers) < MAX_WORKERS: # We dont have worker for that peer and workers num less than max - task = self.getTask(peer) - if task: - worker = Worker(self, peer) - self.workers[key] = worker - worker.key = key - worker.start() - return worker - else: - return False - else: # We have worker for this peer or its over the limit + worker = Worker(self, peer) + self.workers[key] = worker + worker.key = key + worker.start() + return worker + else: # We have woker for this peer or its over the limit return False - def taskAddPeer(self, task, peer): - if task["peers"] is None: - task["peers"] = [] - if peer in task["failed"]: - return False - - if peer not in task["peers"]: - task["peers"].append(peer) - return True - # Start workers to process tasks - def startWorkers(self, peers=None, force_num=0, reason="Unknown"): + def startWorkers(self, peers=None): if not self.tasks: return False # No task for workers - max_workers = min(self.getMaxWorkers(), len(self.site.peers)) - if len(self.workers) >= max_workers and not peers: - return False # Workers number already maxed and no starting peers defined - self.log.debug( - "Starting workers (%s), tasks: %s, peers: %s, workers: %s" % - (reason, len(self.tasks), len(peers or []), len(self.workers)) - ) + if len(self.workers) >= MAX_WORKERS and not peers: + return False # Workers number already maxed and no starting peers definied if not peers: - peers = 
self.site.getConnectedPeers() - if len(peers) < max_workers: - peers += self.site.getRecentPeers(max_workers * 2) + peers = self.site.peers.values() # No peers definied, use any from site if type(peers) is set: peers = list(peers) - # Sort by ping - peers.sort(key=lambda peer: peer.connection.last_ping_delay if peer.connection and peer.connection.last_ping_delay and len(peer.connection.waiting_requests) == 0 and peer.connection.connected else 9999) - + random.shuffle(peers) for peer in peers: # One worker for every peer if peers and peer not in peers: - continue # If peers defined and peer not valid - - if force_num: - worker = self.addWorker(peer, force=True) - force_num -= 1 - else: - worker = self.addWorker(peer) - + continue # If peers definied and peer not valid + worker = self.addWorker(peer) if worker: - self.log.debug("Added worker: %s (rep: %s), workers: %s/%s" % (peer.key, peer.reputation, len(self.workers), max_workers)) + self.log.debug("Added worker: %s, workers: %s/%s" % (peer.key, len(self.workers), MAX_WORKERS)) # Find peers for optional hash in local hash tables and add to task peers - def findOptionalTasks(self, optional_tasks, reset_task=False): + def findOptionalTasks(self, optional_tasks): found = collections.defaultdict(list) # { found_hash: [peer1, peer2...], ...} - for peer in list(self.site.peers.values()): - if not peer.has_hashfield: + for peer in self.site.peers.values(): + if not peer.hashfield: continue - hashfield_set = set(peer.hashfield) # Finding in set is much faster for task in optional_tasks: optional_hash_id = task["optional_hash_id"] - if optional_hash_id in hashfield_set: - if reset_task and len(task["failed"]) > 0: - task["failed"] = [] - if peer in task["failed"]: - continue - if self.taskAddPeer(task, peer): - found[optional_hash_id].append(peer) + if optional_hash_id in peer.hashfield: + found[optional_hash_id].append(peer) + if task["peers"] and peer not in task["peers"]: + task["peers"].append(peer) + else: + 
task["peers"] = [peer] return found # Find peers for optional hash ids in local hash tables - def findOptionalHashIds(self, optional_hash_ids, limit=0): + def findOptionalHashIds(self, optional_hash_ids): found = collections.defaultdict(list) # { found_hash_id: [peer1, peer2...], ...} - for peer in list(self.site.peers.values()): - if not peer.has_hashfield: + for peer in self.site.peers.values(): + if not peer.hashfield: continue - - hashfield_set = set(peer.hashfield) # Finding in set is much faster for optional_hash_id in optional_hash_ids: - if optional_hash_id in hashfield_set: + if optional_hash_id in peer.hashfield: found[optional_hash_id].append(peer) - if limit and len(found[optional_hash_id]) >= limit: - optional_hash_ids.remove(optional_hash_id) return found # Add peers to tasks from found result def addOptionalPeers(self, found_ips): found = collections.defaultdict(list) - for hash_id, peer_ips in found_ips.items(): + for hash_id, peer_ips in found_ips.iteritems(): task = [task for task in self.tasks if task["optional_hash_id"] == hash_id] if task: # Found task, lets take the first task = task[0] else: continue for peer_ip in peer_ips: - peer = self.site.addPeer(peer_ip[0], peer_ip[1], return_peer=True, source="optional") + peer = self.site.addPeer(peer_ip[0], peer_ip[1], return_peer=True) if not peer: continue - if self.taskAddPeer(task, peer): - found[hash_id].append(peer) + if task["peers"] is None: + task["peers"] = [] + if peer not in task["peers"]: + task["peers"].append(peer) if peer.hashfield.appendHashId(hash_id): # Peer has this file peer.time_hashfield = None # Peer hashfield probably outdated + found[hash_id].append(peer) return found # Start find peers for optional files - @util.Noparallel(blocking=False, ignore_args=True) - def startFindOptional(self, reset_task=False, find_more=False, high_priority=False): - # Wait for more file requests - if len(self.tasks) < 20 or high_priority: - time.sleep(0.01) - elif len(self.tasks) > 90: - 
time.sleep(5) - else: - time.sleep(0.5) - + @util.Noparallel(blocking=False) + def startFindOptional(self): + time.sleep(0.01) # Wait for more file requests optional_tasks = [task for task in self.tasks if task["optional_hash_id"]] - if not optional_tasks: - return False optional_hash_ids = set([task["optional_hash_id"] for task in optional_tasks]) - time_tasks = self.time_task_added - - self.log.debug( - "Finding peers for optional files: %s (reset_task: %s, find_more: %s)" % - (optional_hash_ids, reset_task, find_more) - ) - found = self.findOptionalTasks(optional_tasks, reset_task=reset_task) + self.log.debug("Finding peers for optional files: %s" % optional_hash_ids) + found = self.findOptionalTasks(optional_tasks) if found: - found_peers = set([peer for peers in list(found.values()) for peer in peers]) - self.startWorkers(found_peers, force_num=3, reason="Optional found in local peers") + found_peers = set([peer for peers in found.values() for peer in peers]) + self.startWorkers(found_peers) - if len(found) < len(optional_hash_ids) or find_more or (high_priority and any(len(peers) < 10 for peers in found.values())): + if len(found) < len(optional_hash_ids): self.log.debug("No local result for optional files: %s" % (optional_hash_ids - set(found))) # Query hashfield from connected peers @@ -308,114 +239,54 @@ class WorkerManager(object): if not peers: peers = self.site.getConnectablePeers() for peer in peers: - threads.append(self.site.greenlet_manager.spawn(peer.updateHashfield, force=find_more)) + if not peer.time_hashfield: + threads.append(gevent.spawn(peer.updateHashfield)) gevent.joinall(threads, timeout=5) - if time_tasks != self.time_task_added: # New task added since start - optional_tasks = [task for task in self.tasks if task["optional_hash_id"]] - optional_hash_ids = set([task["optional_hash_id"] for task in optional_tasks]) - found = self.findOptionalTasks(optional_tasks) - self.log.debug("Found optional files after query hashtable connected peers: 
%s/%s" % ( - len(found), len(optional_hash_ids) - )) + self.log.debug("Found optional files after query hashtable connected peers: %s/%s" % (len(found), len(optional_hash_ids))) if found: - found_peers = set([peer for hash_id_peers in list(found.values()) for peer in hash_id_peers]) - self.startWorkers(found_peers, force_num=3, reason="Optional found in connected peers") + found_peers = set([peer for hash_id_peers in found.values() for peer in hash_id_peers]) + self.startWorkers(found_peers) - if len(found) < len(optional_hash_ids) or find_more: - self.log.debug( - "No connected hashtable result for optional files: %s (asked: %s)" % - (optional_hash_ids - set(found), len(self.asked_peers)) - ) - if not self.tasks: - self.log.debug("No tasks, stopping finding optional peers") - return + if len(found) < len(optional_hash_ids): + self.log.debug("No connected hashtable result for optional files: %s" % (optional_hash_ids - set(found))) # Try to query connected peers threads = [] - peers = [peer for peer in self.site.getConnectedPeers() if peer.key not in self.asked_peers][0:10] + peers = self.site.getConnectedPeers() if not peers: - peers = self.site.getConnectablePeers(ignore=self.asked_peers) + peers = self.site.getConnectablePeers() for peer in peers: - threads.append(self.site.greenlet_manager.spawn(peer.findHashIds, list(optional_hash_ids))) - self.asked_peers.append(peer.key) + threads.append(gevent.spawn(peer.findHashIds, list(optional_hash_ids))) - for i in range(5): - time.sleep(1) - - thread_values = [thread.value for thread in threads if thread.value] - if not thread_values: - continue - - found_ips = helper.mergeDicts(thread_values) - found = self.addOptionalPeers(found_ips) - self.log.debug("Found optional files after findhash connected peers: %s/%s (asked: %s)" % ( - len(found), len(optional_hash_ids), len(threads) - )) - - if found: - found_peers = set([peer for hash_id_peers in list(found.values()) for peer in hash_id_peers]) - 
self.startWorkers(found_peers, force_num=3, reason="Optional found by findhash connected peers") - - if len(thread_values) == len(threads): - # Got result from all started thread - break - - if len(found) < len(optional_hash_ids): - self.log.debug( - "No findHash result, try random peers: %s (asked: %s)" % - (optional_hash_ids - set(found), len(self.asked_peers)) - ) - # Try to query random peers - - if time_tasks != self.time_task_added: # New task added since start - optional_tasks = [task for task in self.tasks if task["optional_hash_id"]] - optional_hash_ids = set([task["optional_hash_id"] for task in optional_tasks]) - - threads = [] - peers = self.site.getConnectablePeers(ignore=self.asked_peers) - - for peer in peers: - threads.append(self.site.greenlet_manager.spawn(peer.findHashIds, list(optional_hash_ids))) - self.asked_peers.append(peer.key) - - gevent.joinall(threads, timeout=15) + gevent.joinall(threads, timeout=5) found_ips = helper.mergeDicts([thread.value for thread in threads if thread.value]) found = self.addOptionalPeers(found_ips) - self.log.debug("Found optional files after findhash random peers: %s/%s" % (len(found), len(optional_hash_ids))) + self.log.debug("Found optional files after findhash connected peers: %s/%s" % (len(found), len(optional_hash_ids))) if found: - found_peers = set([peer for hash_id_peers in list(found.values()) for peer in hash_id_peers]) - self.startWorkers(found_peers, force_num=3, reason="Option found using findhash random peers") + found_peers = set([peer for hash_id_peers in found.values() for peer in hash_id_peers]) + self.startWorkers(found_peers) if len(found) < len(optional_hash_ids): self.log.debug("No findhash result for optional files: %s" % (optional_hash_ids - set(found))) - if time_tasks != self.time_task_added: # New task added since start - self.log.debug("New task since start, restarting...") - self.site.greenlet_manager.spawnLater(0.1, self.startFindOptional) - else: - self.log.debug("startFindOptional 
ended") - # Stop all worker def stopWorkers(self): - num = 0 - for worker in list(self.workers.values()): - worker.stop(reason="Stopping all workers") - num += 1 + for worker in self.workers.values(): + worker.stop() tasks = self.tasks[:] # Copy for task in tasks: # Mark all current task as failed - self.failTask(task, reason="Stopping all workers") - return num + self.failTask(task) # Find workers by task def findWorkers(self, task): workers = [] - for worker in list(self.workers.values()): + for worker in self.workers.values(): if worker.task == task: workers.append(worker) return workers @@ -425,176 +296,80 @@ class WorkerManager(object): worker.running = False if worker.key in self.workers: del(self.workers[worker.key]) - self.log.debug("Removed worker, workers: %s/%s" % (len(self.workers), self.getMaxWorkers())) - if len(self.workers) <= self.getMaxWorkers() / 3 and len(self.asked_peers) < 10: - optional_task = next((task for task in self.tasks if task["optional_hash_id"]), None) - if optional_task: - if len(self.workers) == 0: - self.startFindOptional(find_more=True) - else: - self.startFindOptional() - elif self.tasks and not self.workers and worker.task and len(worker.task["failed"]) < 20: - self.log.debug("Starting new workers... 
(tasks: %s)" % len(self.tasks)) - self.startWorkers(reason="Removed worker") + self.log.debug("Removed worker, workers: %s/%s" % (len(self.workers), MAX_WORKERS)) - # Tasks sorted by this - def getPriorityBoost(self, inner_path): - if inner_path == "content.json": - return 9999 # Content.json always priority - if inner_path == "index.html": - return 9998 # index.html also important - if "-default" in inner_path: - return -4 # Default files are cloning not important - elif inner_path.endswith("all.css"): - return 14 # boost css files priority - elif inner_path.endswith("all.js"): - return 13 # boost js files priority - elif inner_path.endswith("dbschema.json"): - return 12 # boost database specification - elif inner_path.endswith("content.json"): - return 1 # boost included content.json files priority a bit - elif inner_path.endswith(".json"): - if len(inner_path) < 50: # Boost non-user json files - return 11 + # Create new task and return asyncresult + def addTask(self, inner_path, peer=None, priority=0): + self.site.onFileStart(inner_path) # First task, trigger site download started + task = self.findTask(inner_path) + if task: # Already has task for that file + if peer and task["peers"]: # This peer also has new version, add it to task possible peers + task["peers"].append(peer) + self.log.debug("Added peer %s to %s" % (peer.key, task["inner_path"])) + self.startWorkers([peer]) + elif peer and peer in task["failed"]: + task["failed"].remove(peer) # New update arrived, remove the peer from failed peers + self.log.debug("Removed peer %s from failed %s" % (peer.key, task["inner_path"])) + self.startWorkers([peer]) + + if priority: + task["priority"] += priority # Boost on priority + return task["evt"] + else: # No task for that file yet + evt = gevent.event.AsyncResult() + if peer: + peers = [peer] # Only download from this peer else: - return 2 - return 0 - - def addTaskUpdate(self, task, peer, priority=0): - if priority > task["priority"]: - 
self.tasks.updateItem(task, "priority", priority) - if peer and task["peers"]: # This peer also has new version, add it to task possible peers - task["peers"].append(peer) - self.log.debug("Added peer %s to %s" % (peer.key, task["inner_path"])) - self.startWorkers([peer], reason="Added new task (update received by peer)") - elif peer and peer in task["failed"]: - task["failed"].remove(peer) # New update arrived, remove the peer from failed peers - self.log.debug("Removed peer %s from failed %s" % (peer.key, task["inner_path"])) - self.startWorkers([peer], reason="Added new task (peer failed before)") - - def addTaskCreate(self, inner_path, peer, priority=0, file_info=None): - evt = gevent.event.AsyncResult() - if peer: - peers = [peer] # Only download from this peer - else: - peers = None - if not file_info: + peers = None file_info = self.site.content_manager.getFileInfo(inner_path) - if file_info and file_info["optional"]: - optional_hash_id = helper.toHashId(file_info["sha512"]) - else: - optional_hash_id = None - if file_info: - size = file_info.get("size", 0) - else: - size = 0 + if file_info and file_info["optional"]: + optional_hash_id = helper.toHashId(file_info["sha512"]) + else: + optional_hash_id = None + task = { + "evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, "optional_hash_id": optional_hash_id, + "time_added": time.time(), "time_started": None, "time_action": None, "peers": peers, "priority": priority, "failed": [] + } - self.lock_add_task.acquire() + self.tasks.append(task) - # Check again if we have task for this file - task = self.tasks.findTask(inner_path) - if task: - self.addTaskUpdate(task, peer, priority) - return task - - priority += self.getPriorityBoost(inner_path) - - if self.started_task_num == 0: # Boost priority for first requested file - priority += 1 - - task = { - "id": self.next_task_id, "evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, - 
"optional_hash_id": optional_hash_id, "time_added": time.time(), "time_started": None, "lock": None, - "time_action": None, "peers": peers, "priority": priority, "failed": [], "size": size - } - - self.tasks.append(task) - self.lock_add_task.release() - - self.next_task_id += 1 - self.started_task_num += 1 - if config.verbose: + self.started_task_num += 1 self.log.debug( - "New task: %s, peer lock: %s, priority: %s, optional_hash_id: %s, tasks started: %s" % + "New task: %s, peer lock: %s, priority: %s, optional_hash_id: %s, tasks: %s" % (task["inner_path"], peers, priority, optional_hash_id, self.started_task_num) ) - self.time_task_added = time.time() + if optional_hash_id: + self.startFindOptional() + if peers: + self.startWorkers(peers) + else: + self.startWorkers(peers) + return evt - if optional_hash_id: - if self.asked_peers: - del self.asked_peers[:] # Reset asked peers - self.startFindOptional(high_priority=priority > 0) + # Find a task using inner_path + def findTask(self, inner_path): + for task in self.tasks: + if task["inner_path"] == inner_path: + return task + return None # Not found - if peers: - self.startWorkers(peers, reason="Added new optional task") - - else: - self.startWorkers(peers, reason="Added new task") - return task - - # Create new task and return asyncresult - def addTask(self, inner_path, peer=None, priority=0, file_info=None): - self.site.onFileStart(inner_path) # First task, trigger site download started - task = self.tasks.findTask(inner_path) - if task: # Already has task for that file - self.addTaskUpdate(task, peer, priority) - else: # No task for that file yet - task = self.addTaskCreate(inner_path, peer, priority, file_info) - return task - - def addTaskWorker(self, task, worker): - try: - self.tasks.updateItem(task, "workers_num", task["workers_num"] + 1) - except ValueError: - task["workers_num"] += 1 - - def removeTaskWorker(self, task, worker): - try: - self.tasks.updateItem(task, "workers_num", task["workers_num"] - 1) - 
except ValueError: - task["workers_num"] -= 1 - if len(task["failed"]) >= len(self.workers): - fail_reason = "Too many fails: %s (workers: %s)" % (len(task["failed"]), len(self.workers)) - self.failTask(task, reason=fail_reason) - - # Wait for other tasks - def checkComplete(self): - time.sleep(0.1) - if not self.tasks: - self.log.debug("Check complete: No tasks") - self.onComplete() - - def onComplete(self): - self.started_task_num = 0 - del self.asked_peers[:] - self.site.onComplete() # No more task trigger site complete + # Mark a task failed + def failTask(self, task): + if task in self.tasks: + task["done"] = True + self.tasks.remove(task) # Remove from queue + self.site.onFileFail(task["inner_path"]) + task["evt"].set(False) + if not self.tasks: + self.started_task_num = 0 # Mark a task done def doneTask(self, task): task["done"] = True self.tasks.remove(task) # Remove from queue - if task["optional_hash_id"]: - self.log.debug( - "Downloaded optional file in %.3fs, adding to hashfield: %s" % - (time.time() - task["time_started"], task["inner_path"]) - ) - self.site.content_manager.optionalDownloaded(task["inner_path"], task["optional_hash_id"], task["size"]) self.site.onFileDone(task["inner_path"]) task["evt"].set(True) if not self.tasks: - self.site.greenlet_manager.spawn(self.checkComplete) - - # Mark a task failed - def failTask(self, task, reason="Unknown"): - try: - self.tasks.remove(task) # Remove from queue - except ValueError as err: - return False - - self.log.debug("Task %s failed (Reason: %s)" % (task["inner_path"], reason)) - task["done"] = True - self.site.onFileFail(task["inner_path"]) - task["evt"].set(False) - if not self.tasks: - self.site.greenlet_manager.spawn(self.checkComplete) + self.started_task_num = 0 + self.site.onComplete() # No more task trigger site complete diff --git a/src/Worker/WorkerTaskManager.py b/src/Worker/WorkerTaskManager.py deleted file mode 100644 index 9359701d..00000000 --- a/src/Worker/WorkerTaskManager.py +++ 
/dev/null @@ -1,122 +0,0 @@ -import bisect -from collections.abc import MutableSequence - - -class CustomSortedList(MutableSequence): - def __init__(self): - super().__init__() - self.items = [] # (priority, added index, actual value) - self.logging = False - - def __repr__(self): - return "<{0} {1}>".format(self.__class__.__name__, self.items) - - def __len__(self): - return len(self.items) - - def __getitem__(self, index): - if type(index) is int: - return self.items[index][2] - else: - return [item[2] for item in self.items[index]] - - def __delitem__(self, index): - del self.items[index] - - def __setitem__(self, index, value): - self.items[index] = self.valueToItem(value) - - def __str__(self): - return str(self[:]) - - def insert(self, index, value): - self.append(value) - - def append(self, value): - bisect.insort(self.items, self.valueToItem(value)) - - def updateItem(self, value, update_key=None, update_value=None): - self.remove(value) - if update_key is not None: - value[update_key] = update_value - self.append(value) - - def sort(self, *args, **kwargs): - raise Exception("Sorted list can't be sorted") - - def valueToItem(self, value): - return (self.getPriority(value), self.getId(value), value) - - def getPriority(self, value): - return value - - def getId(self, value): - return id(value) - - def indexSlow(self, value): - for pos, item in enumerate(self.items): - if item[2] == value: - return pos - return None - - def index(self, value): - item = (self.getPriority(value), self.getId(value), value) - bisect_pos = bisect.bisect(self.items, item) - 1 - if bisect_pos >= 0 and self.items[bisect_pos][2] == value: - return bisect_pos - - # Item probably changed since added, switch to slow iteration - pos = self.indexSlow(value) - - if self.logging: - print("Slow index for %s in pos %s bisect: %s" % (item[2], pos, bisect_pos)) - - if pos is None: - raise ValueError("%r not in list" % value) - else: - return pos - - def __contains__(self, value): - try: - 
self.index(value) - return True - except ValueError: - return False - - -class WorkerTaskManager(CustomSortedList): - def __init__(self): - super().__init__() - self.inner_paths = {} - - def getPriority(self, value): - return 0 - (value["priority"] - value["workers_num"] * 10) - - def getId(self, value): - return value["id"] - - def __contains__(self, value): - return value["inner_path"] in self.inner_paths - - def __delitem__(self, index): - # Remove from inner path cache - del self.inner_paths[self.items[index][2]["inner_path"]] - super().__delitem__(index) - - # Fast task search by inner_path - - def append(self, task): - if task["inner_path"] in self.inner_paths: - raise ValueError("File %s already has a task" % task["inner_path"]) - super().append(task) - # Create inner path cache for faster lookup by filename - self.inner_paths[task["inner_path"]] = task - - def remove(self, task): - if task not in self: - raise ValueError("%r not in list" % task) - else: - super().remove(task) - - def findTask(self, inner_path): - return self.inner_paths.get(inner_path, None) diff --git a/src/Worker/__init__.py b/src/Worker/__init__.py index f4d20a96..26649852 100644 --- a/src/Worker/__init__.py +++ b/src/Worker/__init__.py @@ -1,2 +1,2 @@ -from .Worker import Worker -from .WorkerManager import WorkerManager +from Worker import Worker +from WorkerManager import WorkerManager \ No newline at end of file diff --git a/src/lib/BitcoinECC/BitcoinECC.py b/src/lib/BitcoinECC/BitcoinECC.py new file mode 100644 index 00000000..6f481132 --- /dev/null +++ b/src/lib/BitcoinECC/BitcoinECC.py @@ -0,0 +1,467 @@ +# By: HurlSly +# Source: https://github.com/HurlSly/Python/blob/master/BitcoinECC.py +# Modified: random number generator in def GeneratePrivateKey(self): + +import random +import hashlib +import os + +class GaussInt: + #A class for the Gauss integers of the form a + b sqrt(n) where a,b are integers. + #n can be positive or negative. 
+ def __init__(self,x,y,n,p=0): + if p: + self.x=x%p + self.y=y%p + self.n=n%p + else: + self.x=x + self.y=y + self.n=n + + self.p=p + + def __add__(self,b): + return GaussInt(self.x+b.x,self.y+b.y,self.n,self.p) + + def __sub__(self,b): + return GaussInt(self.x-b.x,self.y-b.y,self.n,self.p) + + def __mul__(self,b): + return GaussInt(self.x*b.x+self.n*self.y*b.y,self.x*b.y+self.y*b.x,self.n,self.p) + + def __div__(self,b): + return GaussInt((self.x*b.x-self.n*self.y*b.y)/(b.x*b.x-self.n*b.y*b.y),(-self.x*b.y+self.y*b.x)/(b.x*b.x-self.n*b.y*b.y),self.n,self.p) + + def __eq__(self,b): + return self.x==b.x and self.y==b.y + + def __repr__(self): + if self.p: + return "%s+%s (%d,%d)"%(self.x,self.y,self.n,self.p) + else: + return "%s+%s (%d)"%(self.x,self.y,self.n) + + def __pow__(self,n): + b=Base(n,2) + t=GaussInt(1,0,self.n) + while b: + t=t*t + if b.pop(): + t=self*t + + return t + + def Inv(self): + return GaussInt(self.x/(self.x*self.x-self.n*self.y*self.y),-self.y/(self.x*self.x-self.n*self.y*self.y),self.n,self.p) + +def Cipolla(a,p): + #Find a square root of a modulo p using the algorithm of Cipolla + b=0 + while pow((b*b-a)%p,(p-1)/2,p)==1: + b+=1 + + return (GaussInt(b,1,b**2-a,p)**((p+1)/2)).x + +def Base(n,b): + #Decompose n in base b + l=[] + while n: + l.append(n%b) + n/=b + + return l + +def InvMod(a,n): + #Find the inverse mod n of a. + #Use the Extended Euclides Algorithm. 
+ m=[] + + s=n + while n: + m.append(a/n) + (a,n)=(n,a%n) + + u=1 + v=0 + while m: + (u,v)=(v,u-m.pop()*v) + + return u%s + +def b58encode(v): + #Encode a byte string to the Base58 + digit="123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz" + base=len(digit) + val=0 + for c in v: + val*=256 + val+=ord(c) + + result="" + while val: + (val,mod)=divmod(val,base) + result=digit[mod]+result + + pad=0 + for c in v: + if c=="\0": + pad+=1 + else: + break + + return (digit[0]*pad)+result + +def b58decode(v): + #Decode a Base58 string to byte string + digit="123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz" + base=len(digit) + val=0 + for c in v: + val*=base + val+=digit.find(c) + + result="" + while val: + (val,mod)=divmod(val,256) + result=chr(mod)+result + + pad=0 + for c in v: + if c==digit[0]: + pad+=1 + else: + break + + result="\0"*pad+result + + return result + +def Byte2Hex(b): + #Convert a byte string to hex number + out="" + for x in b: + y=hex(ord(x))[2:] + if len(y)==1: + y="0"+y + out+="%2s"%y + + return out + +def Int2Byte(n,b): + #Convert a integer to a byte string of length b + out="" + + for i in range(b): + (n,m)=divmod(n,256) + out=chr(m)+out + + return out + +class EllipticCurvePoint: + #Main class + #It is an point on an Elliptic Curve + + def __init__(self,x,a,b,p,n=0): + #We store the coordinate in x and the elliptic curbe parameter. + #x is of length 3. This is the 3 projective coordinates of the point. + self.x=x[:] + self.a=a + self.b=b + self.p=p + self.n=n + + def EqualProj(self,y): + #Does y equals self ? + #It computes self cross product with y and check if the result is 0. + return self.x[0]*y.x[1]==self.x[1]*y.x[0] and self.x[1]*y.x[2]==self.x[2]*y.x[1] and self.x[2]*y.x[0]==self.x[0]*y.x[2] + + def __add__(self,y): + #The main function to add self and y + #It uses the formulas I derived in projective coordinates. 
+ #Projectives coordinates are more performant than the usual (x,y) coordinates + #because it we don't need to compute inverse mod p, which is faster. + z=EllipticCurvePoint([0,0,0],self.a,self.b,self.p) + + if self.EqualProj(y): + d=(2*self.x[1]*self.x[2])%self.p + d3=pow(d,3,self.p) + n=(3*pow(self.x[0],2,self.p)+self.a*pow(self.x[2],2,self.p))%self.p + + z.x[0]=(pow(n,2,self.p)*d*self.x[2]-2*d3*self.x[0])%self.p + z.x[1]=(3*self.x[0]*n*pow(d,2,self.p)-pow(n,3,self.p)*self.x[2]-self.x[1]*d3)%self.p + z.x[2]=(self.x[2]*d3)%self.p + else: + d=(y.x[0]*self.x[2]-y.x[2]*self.x[0])%self.p + d3=pow(d,3,self.p) + n=(y.x[1]*self.x[2]-self.x[1]*y.x[2])%self.p + + z.x[0]=(y.x[2]*self.x[2]*pow(n,2,self.p)*d-d3*(y.x[2]*self.x[0]+y.x[0]*self.x[2]))%self.p + z.x[1]=(pow(d,2,self.p)*n*(2*self.x[0]*y.x[2]+y.x[0]*self.x[2])-pow(n,3,self.p)*self.x[2]*y.x[2]-self.x[1]*d3*y.x[2])%self.p + z.x[2]=(self.x[2]*d3*y.x[2])%self.p + + return z + + def __mul__(self,n): + #The fast multiplication of point n times by itself. + b=Base(n,2) + t=EllipticCurvePoint(self.x,self.a,self.b,self.p) + b.pop() + while b: + t+=t + if b.pop(): + t+=self + + return t + + def __repr__(self): + #print a point in (x,y) coordinate. + return "x=%d\ny=%d\n"%((self.x[0]*InvMod(self.x[2],self.p))%self.p,(self.x[1]*InvMod(self.x[2],self.p))%self.p) + + def __eq__(self,x): + #Does self==x ? + return self.x==x.x and self.a==x.a and self.b==x.b and self.p==x.p + + def __ne__(self,x): + #Does self!=x ? + return self.x!=x.x or self.a!=x.a or self.b!=x.b or self.p!=x.p + + def Check(self): + #Is self on the curve ? + return (self.x[0]**3+self.a*self.x[0]*self.x[2]**2+self.b*self.x[2]**3-self.x[1]**2*self.x[2])%self.p==0 + + def GeneratePrivateKey(self): + #Generate a private key. It's just a random number between 1 and n-1. + #Of course, this function isn't cryptographically secure. + #Don't use it to generate your key. Use a cryptographically secure source of randomness instead. 
+ #self.d = random.randint(1,self.n-1) + self.d = random.SystemRandom().randint(1,self.n-1) # Better random fix + + def SignECDSA(self,m): + #Sign a message. The private key is self.d . + h=hashlib.new("SHA256") + h.update(m) + z=int(h.hexdigest(),16) + + r=0 + s=0 + while not r or not s: + #k=random.randint(1,self.n-1) + k=random.SystemRandom().randint(1,self.n-1) # Better random fix + R=self*k + R.Normalize() + r=R.x[0]%self.n + s=(InvMod(k,self.n)*(z+r*self.d))%self.n + + return (r,s) + + def CheckECDSA(self,sig,m): + #Check a signature (r,s) of the message m using the public key self.Q + # and the generator which is self. + #This is not the one used by Bitcoin because the public key isn't known; + # only a hash of the public key is known. See the next function. + (r,s)=sig + + h=hashlib.new("SHA256") + h.update(m) + z=int(h.hexdigest(),16) + + if self.Q.x[2]==0: + return False + if not self.Q.Check(): + return False + if (self.Q*self.n).x[2]!=0: + return False + if r<1 or r>self.n-1 or s<1 or s>self.n-1: + return False + + w=InvMod(s,self.n) + u1=(z*w)%self.n + u2=(r*w)%self.n + R=self*u1+self.Q*u2 + R.Normalize() + + return (R.x[0]-r)%self.n==0 + + def VerifyMessageFromBitcoinAddress(self,adresse,m,sig): + #Check a signature (r,s) for the message m signed by the Bitcoin + # address "addresse". + h=hashlib.new("SHA256") + h.update(m) + z=int(h.hexdigest(),16) + + (r,s)=sig + x=r + y2=(pow(x,3,self.p)+self.a*x+self.b)%self.p + y=Cipolla(y2,self.p) + + for i in range(2): + kG=EllipticCurvePoint([x,y,1],self.a,self.b,self.p,self.n) + mzG=self*((-z)%self.n) + self.Q=(kG*s+mzG)*InvMod(r,self.n) + + adr=self.BitcoinAddresFromPublicKey() + if adr==adresse: + break + y=(-y)%self.p + + if adr!=adresse: + return False + + return True + + def BitcoinAddressFromPrivate(self,pri=None): + #Transform a private key in base58 encoding to a bitcoin address. + #normal means "uncompressed". 
+ if not pri: + print "Private Key :", + pri=raw_input() + + normal=(len(pri)==51) + pri=b58decode(pri) + + if normal: + pri=pri[1:-4] + else: + pri=pri[1:-5] + + self.d=int(Byte2Hex(pri),16) + + return self.BitcoinAddress(normal) + + def PrivateEncoding(self,normal=True): + #Encode a private key self.d to base58 encoding. + p=Int2Byte(self.d,32) + p="\80"+p + + if not normal: + p+=chr(1) + + h=hashlib.new("SHA256") + h.update(p) + s=h.digest() + + h=hashlib.new("SHA256") + h.update(s) + s=h.digest() + + cs=s[:4] + + p+=cs + p=b58encode(p) + + return p + + def BitcoinAddresFromPublicKey(self,normal=True): + #Find the bitcoin address from the public key self.Q + #We do normalization to go from the projective coordinates to the usual + # (x,y) coordinates. + self.Q.Normalize() + if normal: + pk=chr(4)+Int2Byte(self.Q.x[0],32)+Int2Byte((self.Q.x[1])%self.p,32) + else: + if self.Q.x[1]%2==0: + pk=chr(2)+Int2Byte(self.Q.x[0],32) + else: + pk=chr(3)+Int2Byte(self.Q.x[0],32) + + version=chr(0) + + h=hashlib.new("SHA256") + h.update(pk) + s=h.digest() + + h=hashlib.new("RIPEMD160") + h.update(s) + kh=version+h.digest() + + h=hashlib.new("SHA256") + h.update(kh) + cs=h.digest() + + h=hashlib.new("SHA256") + h.update(cs) + cs=h.digest()[:4] + + adr=b58encode(kh+cs) + + return adr + + def BitcoinAddress(self,normal=True): + #Computes a bitcoin address given the private key self.d. + self.Q=self*self.d + + return self.BitcoinAddresFromPublicKey(normal) + + def BitcoinAddressGenerator(self,k,filename): + #Generate Bitcoin address and write them in the filename in the multibit format. + #Change the date as you like. 
+ f=open(filename,"w") + for i in range(k): + self.GeneratePrivateKey() + adr=self.BitcoinAddress() + p=self.PrivateEncoding() + f.write("#%s\n%s 2014-01-30T12:00:00Z\n"%(adr,p)) + + #print hex(self.d) + print adr,p + + f.close() + + def TestSign(self): + #Test signature + self.GeneratePrivateKey() + self.Q=self*self.d + m="Hello World" + adresse=self.BitcoinAddresFromPublicKey() + (r,s)=self.SignECDSA(m) + + m="Hello World" + print self.VerifyMessageFromBitcoinAddress(adresse,m,r,s) + + def Normalize(self): + #Transform projective coordinates of self to the usual (x,y) coordinates. + if self.x[2]: + self.x[0]=(self.x[0]*InvMod(self.x[2],self.p))%self.p + self.x[1]=(self.x[1]*InvMod(self.x[2],self.p))%self.p + self.x[2]=1 + elif self.x[1]: + self.x[0]=(self.x[0]*InvMod(self.x[1],self.p))%self.p + self.x[1]=1 + elif self.x[0]: + self.x[0]=1 + else: + raise Exception + +def Bitcoin(): + #Create the Bitcoin elliptiv curve + a=0 + b=7 + p=2**256-2**32-2**9-2**8-2**7-2**6-2**4-1 + + #Create the generator G of the Bitcoin elliptic curve, with is order n. + Gx=int("79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798",16) + Gy=int("483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8",16) + n =int("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141",16) + + #Create the generator + return EllipticCurvePoint([Gx,Gy,1],a,b,p,n) + + +if __name__ == "__main__": + bitcoin=Bitcoin() + + #Generate the public key from the private one + print bitcoin.BitcoinAddressFromPrivate("23DKRBLkeDbcSaddsMYLAHXhanPmGwkWAhSPVGbspAkc72Hw9BdrDF") + print bitcoin.BitcoinAddress() + + #Print the bitcoin address of the public key generated at the previous line + adr=bitcoin.BitcoinAddresFromPublicKey() + print adr + + #Sign a message with the current address + m="Hello World" + sig=bitcoin.SignECDSA("Hello World") + #Verify the message using only the bitcoin adress, the signature and the message. + #Not using the public key as it is not needed. 
+ print bitcoin.VerifyMessageFromBitcoinAddress(adr,m,sig) diff --git a/src/lib/BitcoinECC/__init__.py b/src/lib/BitcoinECC/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/lib/BitcoinECC/newBitcoinECC.py b/src/lib/BitcoinECC/newBitcoinECC.py new file mode 100644 index 00000000..b09386bc --- /dev/null +++ b/src/lib/BitcoinECC/newBitcoinECC.py @@ -0,0 +1,460 @@ +import random +import hashlib +import base64 + +class GaussInt: + def __init__(self,x,y,n,p=0): + if p: + self.x=x%p + self.y=y%p + self.n=n%p + else: + self.x=x + self.y=y + self.n=n + + self.p=p + + def __add__(self,b): + return GaussInt(self.x+b.x,self.y+b.y,self.n,self.p) + + def __sub__(self,b): + return GaussInt(self.x-b.x,self.y-b.y,self.n,self.p) + + def __mul__(self,b): + return GaussInt(self.x*b.x+self.n*self.y*b.y,self.x*b.y+self.y*b.x,self.n,self.p) + + def __div__(self,b): + return GaussInt((self.x*b.x-self.n*self.y*b.y)/(b.x*b.x-self.n*b.y*b.y),(-self.x*b.y+self.y*b.x)/(b.x*b.x-self.n*b.y*b.y),self.n,self.p) + + def __eq__(self,b): + return self.x==b.x and self.y==b.y + + def __repr__(self): + if self.p: + return "%s+%s (%d,%d)"%(self.x,self.y,self.n,self.p) + else: + return "%s+%s (%d)"%(self.x,self.y,self.n) + + def __pow__(self,n): + b=Base(n,2) + t=GaussInt(1,0,self.n) + while b: + t=t*t + if b.pop(): + t=self*t + + return t + + def Inv(self): + return GaussInt(self.x/(self.x*self.x-self.n*self.y*self.y),-self.y/(self.x*self.x-self.n*self.y*self.y),self.n,self.p) + + def Eval(self): + return self.x.Eval()+self.y.Eval()*math.sqrt(self.n) + +def Cipolla(a,p): + b=0 + while pow((b*b-a)%p,(p-1)/2,p)==1: + b+=1 + + return (GaussInt(b,1,b**2-a,p)**((p+1)/2)).x + +def InvMod(a,n): + m=[] + + s=n + while n: + m.append(a/n) + (a,n)=(n,a%n) + + u=1 + v=0 + while m: + (u,v)=(v,u-m.pop()*v) + + return u%s + +def Base(n,b): + l=[] + while n: + l.append(n%b) + n/=b + + return l + +def MsgMagic(message): + return "\x18Bitcoin Signed Message:\n"+chr(len(message))+message + +def 
Hash(m,method): + h=hashlib.new(method) + h.update(m) + + return h.digest() + +def b58encode(v): + #Encode a byte string to the Base58 + digit="123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz" + base=len(digit) + val=0 + for c in v: + val*=256 + val+=ord(c) + + result="" + while val: + (val,mod)=divmod(val,base) + result=digit[mod]+result + + pad=0 + for c in v: + if c=="\x00": + pad+=1 + else: + break + + return (digit[0]*pad)+result + +def b58decode(v): + #Decode a Base58 string to byte string + digit="123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz" + base=len(digit) + val=0 + for c in v: + val*=base + val+=digit.find(c) + + result="" + while val: + (val,mod)=divmod(val,256) + result=chr(mod)+result + + pad=0 + for c in v: + if c==digit[0]: + pad+=1 + else: + break + + return "\x00"*pad+result + +def Byte2Int(b): + n=0 + for x in b: + n*=256 + n+=ord(x) + + return n + +def Byte2Hex(b): + #Convert a byte string to hex number + out="" + for x in b: + y=hex(ord(x))[2:] + if len(y)==1: + y="0"+y + out+="%2s"%y + + return out + +def Int2Byte(n,b): + #Convert a integer to a byte string of length b + out="" + + for _ in range(b): + (n,m)=divmod(n,256) + out=chr(m)+out + + return out + +class EllipticCurvePoint: + #Main class + #It's a point on an Elliptic Curve + + def __init__(self,x,a,b,p,n=0): + #We store the coordinate in x and the elliptic curve parameter. + #x is of length 3. This is the 3 projective coordinates of the point. + self.x=x[:] + self.a=a + self.b=b + self.p=p + self.n=n + + def __add__(self,y): + #The main function to add self and y + #It uses the formulas I derived in projective coordinates. + #Projectives coordinates are more efficient than the usual (x,y) coordinates + #because we don't need to compute inverse mod p, which is faster. 
+ z=EllipticCurvePoint([0,0,0],self.a,self.b,self.p) + + if self==y: + d=(2*self.x[1]*self.x[2])%self.p + d3=pow(d,3,self.p) + n=(3*pow(self.x[0],2,self.p)+self.a*pow(self.x[2],2,self.p))%self.p + + z.x[0]=(pow(n,2,self.p)*d*self.x[2]-2*d3*self.x[0])%self.p + z.x[1]=(3*self.x[0]*n*pow(d,2,self.p)-pow(n,3,self.p)*self.x[2]-self.x[1]*d3)%self.p + z.x[2]=(self.x[2]*d3)%self.p + else: + d=(y.x[0]*self.x[2]-y.x[2]*self.x[0])%self.p + d3=pow(d,3,self.p) + n=(y.x[1]*self.x[2]-self.x[1]*y.x[2])%self.p + + z.x[0]=(y.x[2]*self.x[2]*pow(n,2,self.p)*d-d3*(y.x[2]*self.x[0]+y.x[0]*self.x[2]))%self.p + z.x[1]=(pow(d,2,self.p)*n*(2*self.x[0]*y.x[2]+y.x[0]*self.x[2])-pow(n,3,self.p)*self.x[2]*y.x[2]-self.x[1]*d3*y.x[2])%self.p + z.x[2]=(self.x[2]*d3*y.x[2])%self.p + + return z + + def __mul__(self,n): + #The fast multiplication of point n times by itself. + b=Base(n,2) + t=EllipticCurvePoint(self.x,self.a,self.b,self.p) + b.pop() + while b: + t+=t + if b.pop(): + t+=self + + return t + + def __repr__(self): + #print a point in (x,y) coordinate. + return "x=%d\ny=%d\n"%((self.x[0]*InvMod(self.x[2],self.p))%self.p,(self.x[1]*InvMod(self.x[2],self.p))%self.p) + + def __eq__(self,y): + #Does self==y ? + #It computes self cross product with x and check if the result is 0. + return self.x[0]*y.x[1]==self.x[1]*y.x[0] and self.x[1]*y.x[2]==self.x[2]*y.x[1] and self.x[2]*y.x[0]==self.x[0]*y.x[2] and self.a==y.a and self.b==y.b and self.p==y.p + + def __ne__(self,y): + #Does self!=x ? + return not (self == y) + + def Normalize(self): + #Transform projective coordinates of self to the usual (x,y) coordinates. + if self.x[2]: + self.x[0]=(self.x[0]*InvMod(self.x[2],self.p))%self.p + self.x[1]=(self.x[1]*InvMod(self.x[2],self.p))%self.p + self.x[2]=1 + elif self.x[1]: + self.x[0]=(self.x[0]*InvMod(self.x[1],self.p))%self.p + self.x[1]=1 + elif self.x[0]: + self.x[0]=1 + else: + raise Exception + + def Check(self): + #Is self on the curve ? 
+ return (self.x[0]**3+self.a*self.x[0]*self.x[2]**2+self.b*self.x[2]**3-self.x[1]**2*self.x[2])%self.p==0 + + + def CryptAddr(self,filename,password,Address): + txt="" + for tag in Address: + (addr,priv)=Address[tag] + if priv: + txt+="%s\t%s\t%s\n"%(tag,addr,priv) + else: + txt+="%s\t%s\t\n"%(tag,addr) + + txt+="\x00"*(15-(len(txt)-1)%16) + + password+="\x00"*(15-(len(password)-1)%16) + crypt=twofish.Twofish(password).encrypt(txt) + + f=open(filename,"wb") + f.write(crypt) + f.close() + + def GenerateD(self): + #Generate a private key. It's just a random number between 1 and n-1. + #Of course, this function isn't cryptographically secure. + #Don't use it to generate your key. Use a cryptographically secure source of randomness instead. + #return random.randint(1,self.n-1) + return random.SystemRandom().randint(1,self.n-1) # Better random fix + + def CheckECDSA(self,sig,message,Q): + #Check a signature (r,s) of the message m using the public key self.Q + # and the generator which is self. + #This is not the one used by Bitcoin because the public key isn't known; + # only a hash of the public key is known. See the function VerifyMessageFromAddress. + (r,s)=sig + + if Q.x[2]==0: + return False + if not Q.Check(): + return False + if (Q*self.n).x[2]!=0: + return False + if r<1 or r>self.n-1 or s<1 or s>self.n-1: + return False + + z=Byte2Int(Hash(Hash(MsgMagic(message),"SHA256"),"SHA256")) + + w=InvMod(s,self.n) + u1=(z*w)%self.n + u2=(r*w)%self.n + R=self*u1+Q*u2 + R.Normalize() + + return (R.x[0]-r)%self.n==0 + + def SignMessage(self,message,priv): + #Sign a message. The private key is self.d. 
+ (d,uncompressed)=self.DFromPriv(priv) + + z=Byte2Int(Hash(Hash(MsgMagic(message),"SHA256"),"SHA256")) + + r=0 + s=0 + while not r or not s: + #k=random.randint(1,self.n-1) + k=random.SystemRandom().randint(1,self.n-1) # Better random fix + R=self*k + R.Normalize() + r=R.x[0]%self.n + s=(InvMod(k,self.n)*(z+r*d))%self.n + + val=27 + if not uncompressed: + val+=4 + + return base64.standard_b64encode(chr(val)+Int2Byte(r,32)+Int2Byte(s,32)) + + def VerifyMessageFromAddress(self,addr,message,sig): + #Check a signature (r,s) for the message m signed by the Bitcoin + # address "addr". + + sign=base64.standard_b64decode(sig) + (r,s)=(Byte2Int(sign[1:33]),Byte2Int(sign[33:65])) + + z=Byte2Int(Hash(Hash(MsgMagic(message),"SHA256"),"SHA256")) + + val=ord(sign[0]) + if val<27 or val>=35: + return False + + if val>=31: + uncompressed=False + val-=4 + else: + uncompressed=True + + x=r + y2=(pow(x,3,self.p) + self.a*x + self.b) % self.p + y=Cipolla(y2,self.p) + + for _ in range(2): + kG=EllipticCurvePoint([x,y,1],self.a,self.b,self.p,self.n) + mzG=self*((-z)%self.n) + Q=(kG*s+mzG)*InvMod(r,self.n) + + if self.AddressFromPublicKey(Q,uncompressed)==addr: + return True + + y=self.p-y + + return False + + def AddressFromPrivate(self,priv): + #Transform a private key to a bitcoin address. + (d,uncompressed)=self.DFromPriv(priv) + + return self.AddressFromD(d,uncompressed) + + def PrivFromD(self,d,uncompressed): + #Encode a private key self.d to base58 encoding. 
+ p=Int2Byte(d,32) + p="\x80"+p + + if not uncompressed: + p+=chr(1) + + cs=Hash(Hash(p,"SHA256"),"SHA256")[:4] + + return b58encode(p+cs) + + def DFromPriv(self,priv): + uncompressed=(len(priv)==51) + priv=b58decode(priv) + + if uncompressed: + priv=priv[:-4] + else: + priv=priv[:-5] + + return (Byte2Int(priv[1:]),uncompressed) + + def AddressFromPublicKey(self,Q,uncompressed): + #Find the bitcoin address from the public key self.Q + #We do normalization to go from the projective coordinates to the usual + # (x,y) coordinates. + Q.Normalize() + if uncompressed: + pk=chr(4)+Int2Byte(Q.x[0],32)+Int2Byte(Q.x[1],32) + else: + pk=chr(2+Q.x[1]%2)+Int2Byte(Q.x[0],32) + + kh=chr(0)+Hash(Hash(pk,"SHA256"),"RIPEMD160") + cs=Hash(Hash(kh,"SHA256"),"SHA256")[:4] + + return b58encode(kh+cs) + + def AddressFromD(self,d,uncompressed): + #Computes a bitcoin address given the private key self.d. + return self.AddressFromPublicKey(self*d,uncompressed) + + def IsValid(self,addr): + adr=b58decode(addr) + kh=adr[:-4] + cs=adr[-4:] + + verif=Hash(Hash(kh,"SHA256"),"SHA256")[:4] + + return cs==verif + + def AddressGenerator(self,k,uncompressed=True): + #Generate Bitcoin address and write them in the multibit format. + #Change the date as you like. 
+ liste={} + for i in range(k): + d=self.GenerateD() + addr=self.AddressFromD(d,uncompressed) + priv=self.PrivFromD(d,uncompressed) + liste[i]=[addr,priv] + print "%s %s"%(addr, priv) + + return liste + +def Bitcoin(): + a=0 + b=7 + p=2**256-2**32-2**9-2**8-2**7-2**6-2**4-1 + Gx=int("79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798",16) + Gy=int("483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8",16) + n=int("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141",16) + + return EllipticCurvePoint([Gx,Gy,1],a,b,p,n) + +def main(): + bitcoin=Bitcoin() + + #Generate an adress from the private key + privkey = "PrivatekeyinBase58" + adr = bitcoin.AddressFromPrivate(privkey) + print "Address : ", adr + + #Sign a message with the current address + m="Hello World" + sig=bitcoin.SignMessage("Hello World", privkey) + #Verify the message using only the bitcoin adress, the signature and the message. + #Not using the public key as it is not needed. + if bitcoin.VerifyMessageFromAddress(adr,m,sig): + print "Message verified" + + #Generate some addresses + print "Here are some adresses and associated private keys" + bitcoin.AddressGenerator(10) + +if __name__ == "__main__": main() diff --git a/src/lib/Ed25519.py b/src/lib/Ed25519.py deleted file mode 100644 index 20bdc1a9..00000000 --- a/src/lib/Ed25519.py +++ /dev/null @@ -1,340 +0,0 @@ -## ZeroNet onion V3 support -## The following copied code is copied from stem.util.ed25519 official Tor Project python3 lib -## url : https://gitweb.torproject.org/stem.git/tree/stem/util/ed25519.py -## the ##modified tag means that the function has been modified respect to the one used by stem lib -## the ##custom tag means that the function has been added by me and it's not present on the stem ed25519.py file -## every comment i make begins with ## -## -# The following is copied from... -# -# https://github.com/pyca/ed25519 -# -# This is under the CC0 license. For more information please see... 
-# -# https://github.com/pyca/cryptography/issues/5068 - -# ed25519.py - Optimized version of the reference implementation of Ed25519 -# -# Written in 2011? by Daniel J. Bernstein -# 2013 by Donald Stufft -# 2013 by Alex Gaynor -# 2013 by Greg Price -# -# To the extent possible under law, the author(s) have dedicated all copyright -# and related and neighboring rights to this software to the public domain -# worldwide. This software is distributed without any warranty. -# -# You should have received a copy of the CC0 Public Domain Dedication along -# with this software. If not, see -# . - -""" -NB: This code is not safe for use with secret keys or secret data. -The only safe use of this code is for verifying signatures on public messages. - -Functions for computing the public key of a secret key and for signing -a message are included, namely publickey_unsafe and signature_unsafe, -for testing purposes only. - -The root of the problem is that Python's long-integer arithmetic is -not designed for use in cryptography. Specifically, it may take more -or less time to execute an operation depending on the values of the -inputs, and its memory access patterns may also depend on the inputs. -This opens it to timing and cache side-channel attacks which can -disclose data to an attacker. We rely on Python's long-integer -arithmetic, so we cannot handle secrets without risking their disclosure. -""" - -import hashlib -import operator -import sys -import base64 - - -__version__ = "1.0.dev0" - - -# Useful for very coarse version differentiation. 
-PY3 = sys.version_info[0] == 3 - -if PY3: - indexbytes = operator.getitem - intlist2bytes = bytes - int2byte = operator.methodcaller("to_bytes", 1, "big") -else: - int2byte = chr - range = list(range(1,10000000)) - - def indexbytes(buf, i): - return ord(buf[i]) - - def intlist2bytes(l): - return b"".join(chr(c) for c in l) - - -b = 256 -q = 2 ** 255 - 19 -l = 2 ** 252 + 27742317777372353535851937790883648493 - - -def H(m): - return hashlib.sha512(m).digest() - - -def pow2(x, p): - """== pow(x, 2**p, q)""" - while p > 0: - x = x * x % q - p -= 1 - return x - - -def inv(z): - """$= z^{-1} \mod q$, for z != 0""" - # Adapted from curve25519_athlon.c in djb's Curve25519. - z2 = z * z % q # 2 - z9 = pow2(z2, 2) * z % q # 9 - z11 = z9 * z2 % q # 11 - z2_5_0 = (z11 * z11) % q * z9 % q # 31 == 2^5 - 2^0 - z2_10_0 = pow2(z2_5_0, 5) * z2_5_0 % q # 2^10 - 2^0 - z2_20_0 = pow2(z2_10_0, 10) * z2_10_0 % q # ... - z2_40_0 = pow2(z2_20_0, 20) * z2_20_0 % q - z2_50_0 = pow2(z2_40_0, 10) * z2_10_0 % q - z2_100_0 = pow2(z2_50_0, 50) * z2_50_0 % q - z2_200_0 = pow2(z2_100_0, 100) * z2_100_0 % q - z2_250_0 = pow2(z2_200_0, 50) * z2_50_0 % q # 2^250 - 2^0 - return pow2(z2_250_0, 5) * z11 % q # 2^255 - 2^5 + 11 = q - 2 - - -d = -121665 * inv(121666) % q -I = pow(2, (q - 1) // 4, q) - - -def xrecover(y): - xx = (y * y - 1) * inv(d * y * y + 1) - x = pow(xx, (q + 3) // 8, q) - - if (x * x - xx) % q != 0: - x = (x * I) % q - - if x % 2 != 0: - x = q-x - - return x - - -By = 4 * inv(5) -Bx = xrecover(By) -B = (Bx % q, By % q, 1, (Bx * By) % q) -ident = (0, 1, 1, 0) - - -def edwards_add(P, Q): - # This is formula sequence 'addition-add-2008-hwcd-3' from - # http://www.hyperelliptic.org/EFD/g1p/auto-twisted-extended-1.html - (x1, y1, z1, t1) = P - (x2, y2, z2, t2) = Q - - a = (y1-x1)*(y2-x2) % q - b = (y1+x1)*(y2+x2) % q - c = t1*2*d*t2 % q - dd = z1*2*z2 % q - e = b - a - f = dd - c - g = dd + c - h = b + a - x3 = e*f - y3 = g*h - t3 = e*h - z3 = f*g - - return (x3 % q, y3 % q, z3 % q, t3 % 
q) - - -def edwards_double(P): - # This is formula sequence 'dbl-2008-hwcd' from - # http://www.hyperelliptic.org/EFD/g1p/auto-twisted-extended-1.html - (x1, y1, z1, t1) = P - - a = x1*x1 % q - b = y1*y1 % q - c = 2*z1*z1 % q - # dd = -a - e = ((x1+y1)*(x1+y1) - a - b) % q - g = -a + b # dd + b - f = g - c - h = -a - b # dd - b - x3 = e*f - y3 = g*h - t3 = e*h - z3 = f*g - - return (x3 % q, y3 % q, z3 % q, t3 % q) - - -def scalarmult(P, e): - if e == 0: - return ident - Q = scalarmult(P, e // 2) - Q = edwards_double(Q) - if e & 1: - Q = edwards_add(Q, P) - return Q - - -# Bpow[i] == scalarmult(B, 2**i) -Bpow = [] - - -def make_Bpow(): - P = B - for i in range(253): - Bpow.append(P) - P = edwards_double(P) -make_Bpow() - - -def scalarmult_B(e): - """ - Implements scalarmult(B, e) more efficiently. - """ - # scalarmult(B, l) is the identity - e = e % l - P = ident - for i in range(253): - if e & 1: - P = edwards_add(P, Bpow[i]) - e = e // 2 - assert e == 0, e - return P - - -def encodeint(y): - bits = [(y >> i) & 1 for i in range(b)] - return b''.join([ - int2byte(sum([bits[i * 8 + j] << j for j in range(8)])) - for i in range(b//8) - ]) - - -def encodepoint(P): - (x, y, z, t) = P - zi = inv(z) - x = (x * zi) % q - y = (y * zi) % q - bits = [(y >> i) & 1 for i in range(b - 1)] + [x & 1] - return b''.join([ - int2byte(sum([bits[i * 8 + j] << j for j in range(8)])) - for i in range(b // 8) - ]) - - -def bit(h, i): - return (indexbytes(h, i // 8) >> (i % 8)) & 1 - -##modified -def publickey_unsafe(sk): - """ - Not safe to use with secret keys or secret data. - - See module docstring. This function should be used for testing only. 
- """ - ##h = H(sk) - h = sk - a = 2 ** (b - 2) + sum(2 ** i * bit(h, i) for i in range(3, b - 2)) - A = scalarmult_B(a) - return encodepoint(A) - -##custom -## from stem.util.str_tools._to_unicode_impl -## from https://gitweb.torproject.org/stem.git/tree/stem/util/str_tools.py#n80 -def to_unicode_impl(msg): - if msg is not None and not isinstance(msg, str): - return msg.decode('utf-8', 'replace') - else: - return msg - -##custom -## rewritten stem.descriptor.hidden_service.address_from_identity_key -## from https://gitweb.torproject.org/stem.git/tree/stem/descriptor/hidden_service.py#n1088 -def publickey_to_onionaddress(key): - CHECKSUM_CONSTANT = b'.onion checksum' - ## version = stem.client.datatype.Size.CHAR.pack(3) - version = b'\x03' - checksum = hashlib.sha3_256(CHECKSUM_CONSTANT + key + version).digest()[:2] - onion_address = base64.b32encode(key + checksum + version) - return to_unicode_impl(onion_address + b'.onion').lower() - - -def Hint(m): - h = H(m) - return sum(2 ** i * bit(h, i) for i in range(2 * b)) - -##modified -def signature_unsafe(m, sk, pk): - """ - Not safe to use with secret keys or secret data. - - See module docstring. This function should be used for testing only. 
- """ - ##h = H(sk) - h = sk - a = 2 ** (b - 2) + sum(2 ** i * bit(h, i) for i in range(3, b - 2)) - r = Hint( - intlist2bytes([indexbytes(h, j) for j in range(b // 8, b // 4)]) + m - ) - R = scalarmult_B(r) - S = (r + Hint(encodepoint(R) + pk + m) * a) % l - return encodepoint(R) + encodeint(S) - - -def isoncurve(P): - (x, y, z, t) = P - return (z % q != 0 and - x*y % q == z*t % q and - (y*y - x*x - z*z - d*t*t) % q == 0) - - -def decodeint(s): - return sum(2 ** i * bit(s, i) for i in range(0, b)) - - -def decodepoint(s): - y = sum(2 ** i * bit(s, i) for i in range(0, b - 1)) - x = xrecover(y) - if x & 1 != bit(s, b-1): - x = q - x - P = (x, y, 1, (x*y) % q) - if not isoncurve(P): - raise ValueError("decoding point that is not on curve") - return P - - -class SignatureMismatch(Exception): - pass - - -def checkvalid(s, m, pk): - """ - Not safe to use when any argument is secret. - - See module docstring. This function should be used only for - verifying public signatures of public messages. - """ - if len(s) != b // 4: - raise ValueError("signature length is wrong") - - if len(pk) != b // 8: - raise ValueError("public-key length is wrong") - - R = decodepoint(s[:b // 8]) - A = decodepoint(pk) - S = decodeint(s[b // 8:b // 4]) - h = Hint(encodepoint(R) + pk + m) - - (x1, y1, z1, t1) = P = scalarmult_B(S) - (x2, y2, z2, t2) = Q = edwards_add(R, scalarmult(A, h)) - - if (not isoncurve(P) or not isoncurve(Q) or - (x1*z2 - x2*z1) % q != 0 or (y1*z2 - y2*z1) % q != 0): - raise SignatureMismatch("signature does not pass verification") diff --git a/src/lib/PySocks/LICENSE b/src/lib/PySocks/LICENSE new file mode 100644 index 00000000..04b6b1f3 --- /dev/null +++ b/src/lib/PySocks/LICENSE @@ -0,0 +1,22 @@ +Copyright 2006 Dan-Haim. All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: +1. 
Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +3. Neither the name of Dan Haim nor the names of his contributors may be used + to endorse or promote products derived from this software without specific + prior written permission. + +THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO +EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA +OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT +OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMANGE. diff --git a/src/lib/PySocks/README.md b/src/lib/PySocks/README.md new file mode 100644 index 00000000..65419240 --- /dev/null +++ b/src/lib/PySocks/README.md @@ -0,0 +1,299 @@ +PySocks +======= + +Updated version of SocksiPy. Many old bugs fixed, and overall code cleanup. + +Acts as a drop-in replacement to the socket module. + +---------------- + +Features +======== + +* Fully supports Python 2.6 - 3.4 + +* UDP support + +* SocksiPyHandler, courtesy e000, was also added as an example of how this module can be used with urllib2. See example code in sockshandler.py. `pip install` and `setup.py install` will automatically install the `sockshandler` module. 
+ +* Bugs in the original SocksiPy were fixed, including two that could lead to infinite hanging when communicating with bad proxy servers. + +* urllib3, which powers the requests module, is working on integrating SOCKS proxy support based on this branch + +* `SOCKS5`, `SOCKS4`, and `HTTP` are now aliases for `PROXY_TYPE_SOCKS5`, `PROXY_TYPE_SOCKS4`, and `PROXY_TYPE_HTTP` + +* Tests added + +* Various style and performance improvements; codebase simplified + +* Actively maintained + +Installation +============ + + pip install PySocks + +Or download the tarball / `git clone` and... + + python setup.py install + +These will install both the `socks` and `sockshandler` modules. + +Alternatively, include just `socks.py` in your project. + +-------------------------------------------- + +*Warning:* PySocks/SocksiPy only supports HTTP proxies that use CONNECT tunneling. Certain HTTP proxies may not work with this library. If you wish to use HTTP proxies (and not SOCKS proxies), it is recommended that you rely on your HTTP client's native proxy support (`proxies` dict for `requests`, or `urllib2.ProxyHandler` for `urllib2`) instead. 
+ +-------------------------------------------- + +Usage +===== + +## Example ## + + import socks + + s = socks.socksocket() + + s.set_proxy(socks.SOCKS5, "localhost") # SOCKS4 and SOCKS5 use port 1080 by default + # Or + s.set_proxy(socks.SOCKS4, "localhost", 4444) + # Or + s.set_proxy(socks.HTTP, "5.5.5.5", 8888) + + # Can be treated identical to a regular socket object + s.connect(("www.test.com", 80)) + s.sendall("GET / ...") + print s.recv(4096) + + +To monkeypatch the entire standard library with a single default proxy: + + import socket + import socks + import urllib2 + + socks.set_default_proxy(socks.SOCKS5, "localhost") + socket.socket = socks.socksocket + + urllib2.urlopen("http://...") # All requests will pass through the SOCKS proxy + +Note that monkeypatching may not work for all standard modules or for all third party modules, and generally isn't recommended. + +-------------------------------------------- + +Original SocksiPy README attached below, amended to reflect API changes. + +-------------------------------------------- + +SocksiPy - version 1.5.0 + +A Python SOCKS module. + +(C) 2006 Dan-Haim. All rights reserved. + +See LICENSE file for details. + + +*WHAT IS A SOCKS PROXY?* + +A SOCKS proxy is a proxy server at the TCP level. In other words, it acts as +a tunnel, relaying all traffic going through it without modifying it. +SOCKS proxies can be used to relay traffic using any network protocol that +uses TCP. + +*WHAT IS SOCKSIPY?* + +This Python module allows you to create TCP connections through a SOCKS +proxy without any special effort. +It also supports relaying UDP packets with a SOCKS5 proxy. + +*PROXY COMPATIBILITY* + +SocksiPy is compatible with three different types of proxies: + +1. SOCKS Version 4 (SOCKS4), including the SOCKS4a extension. +2. SOCKS Version 5 (SOCKS5). +3. HTTP Proxies which support tunneling using the CONNECT method. 
+
+*SYSTEM REQUIREMENTS*
+
+Being written in Python, SocksiPy can run on any platform that has a Python
+interpreter and TCP/IP support.
+This module has been tested with Python 2.3 and should work with greater versions
+just as well.
+
+
+INSTALLATION
+-------------
+
+Simply copy the file "socks.py" to your Python's `lib/site-packages` directory,
+and you're ready to go. [Editor's note: it is better to use `python setup.py install` for PySocks]
+
+
+USAGE
+------
+
+First load the socks module with the command:
+
+    >>> import socks
+    >>>
+
+The socks module provides a class called `socksocket`, which is the base to all of the module's functionality.
+
+The `socksocket` object has the same initialization parameters as the normal socket
+object to ensure maximal compatibility, however it should be noted that `socksocket` will only function with family being `AF_INET` and
+type being either `SOCK_STREAM` or `SOCK_DGRAM`.
+Generally, it is best to initialize the `socksocket` object with no parameters
+
+    >>> s = socks.socksocket()
+    >>>
+
+The `socksocket` object has an interface which is very similar to socket's (in fact
+the `socksocket` class is derived from socket) with a few extra methods.
+To select the proxy server you would like to use, use the `set_proxy` method, whose
+syntax is:
+
+    set_proxy(proxy_type, addr[, port[, rdns[, username[, password]]]])
+
+Explanation of the parameters:
+
+`proxy_type` - The type of the proxy server. This can be one of three possible
+choices: `PROXY_TYPE_SOCKS4`, `PROXY_TYPE_SOCKS5` and `PROXY_TYPE_HTTP` for SOCKS4,
+SOCKS5 and HTTP servers respectively. `SOCKS4`, `SOCKS5`, and `HTTP` are all aliases, respectively.
+
+`addr` - The IP address or DNS name of the proxy server.
+
+`port` - The port of the proxy server. Defaults to 1080 for socks and 8080 for http.
+
+`rdns` - This is a boolean flag that modifies the behavior regarding DNS resolving.
+If it is set to True, DNS resolving will be performed remotely, on the server.
+If it is set to False, DNS resolving will be performed locally. Please note that
+setting this to True with SOCKS4 servers actually uses an extension to the protocol,
+called SOCKS4a, which may not be supported on all servers (SOCKS5 and http servers
+always support DNS). The default is True.
+
+`username` - For SOCKS5 servers, this allows simple username / password authentication
+with the server. For SOCKS4 servers, this parameter will be sent as the userid.
+This parameter is ignored if an HTTP server is being used. If it is not provided,
+authentication will not be used (servers may accept unauthenticated requests).
+
+`password` - This parameter is valid only for SOCKS5 servers and specifies the
+respective password for the username provided.
+
+Example of usage:
+
+    >>> s.set_proxy(socks.SOCKS5, "socks.example.com") # uses default port 1080
+    >>> s.set_proxy(socks.SOCKS4, "socks.test.com", 1081)
+
+After the set_proxy method has been called, simply call the connect method with the
+traditional parameters to establish a connection through the proxy:
+
+    >>> s.connect(("www.sourceforge.net", 80))
+    >>>
+
+Connection will take a bit longer to allow negotiation with the proxy server.
+Please note that calling connect without calling `set_proxy` earlier will connect
+without a proxy (just like a regular socket).
+
+Errors: Any errors in the connection process will trigger exceptions. The exception
+may either be generated by the underlying socket layer or may be custom module
+exceptions, whose details follow:
+
+class `ProxyError` - This is a base exception class. It is not raised directly but
+rather all other exception classes raised by this module are derived from it.
+This allows an easy way to catch all proxy-related errors. It descends from `IOError`.
+
+All `ProxyError` exceptions have an attribute `socket_err`, which will contain either a
+caught `socket.error` exception, or `None` if there wasn't any.
+ +class `GeneralProxyError` - When thrown, it indicates a problem which does not fall +into another category. + +* `Sent invalid data` - This error means that unexpected data has been received from +the server. The most common reason is that the server specified as the proxy is +not really a SOCKS4/SOCKS5/HTTP proxy, or maybe the proxy type specified is wrong. + +* `Connection closed unexpectedly` - The proxy server unexpectedly closed the connection. +This may indicate that the proxy server is experiencing network or software problems. + +* `Bad proxy type` - This will be raised if the type of the proxy supplied to the +set_proxy function was not one of `SOCKS4`/`SOCKS5`/`HTTP`. + +* `Bad input` - This will be raised if the `connect()` method is called with bad input +parameters. + +class `SOCKS5AuthError` - This indicates that the connection through a SOCKS5 server +failed due to an authentication problem. + +* `Authentication is required` - This will happen if you use a SOCKS5 server which +requires authentication without providing a username / password at all. + +* `All offered authentication methods were rejected` - This will happen if the proxy +requires a special authentication method which is not supported by this module. + +* `Unknown username or invalid password` - Self descriptive. + +class `SOCKS5Error` - This will be raised for SOCKS5 errors which are not related to +authentication. +The parameter is a tuple containing a code, as given by the server, +and a description of the +error. The possible errors, according to the RFC, are: + +* `0x01` - General SOCKS server failure - If for any reason the proxy server is unable to +fulfill your request (internal server error). +* `0x02` - connection not allowed by ruleset - If the address you're trying to connect to +is blacklisted on the server or requires authentication. +* `0x03` - Network unreachable - The target could not be contacted. 
A router on the network
+had replied with a destination net unreachable error.
+* `0x04` - Host unreachable - The target could not be contacted. A router on the network
+had replied with a destination host unreachable error.
+* `0x05` - Connection refused - The target server has actively refused the connection
+(the requested port is closed).
+* `0x06` - TTL expired - The TTL value of the SYN packet from the proxy to the target server
+has expired. This usually means that there are network problems causing the packet
+to be caught in a router-to-router "ping-pong".
+* `0x07` - Command not supported - For instance if the server does not support UDP.
+* `0x08` - Address type not supported - The client has provided an invalid address type.
+When using this module, this error should not occur.
+
+class `SOCKS4Error` - This will be raised for SOCKS4 errors. The parameter is a tuple
+containing a code and a description of the error, as given by the server. The
+possible errors, according to the specification, are:
+
+* `0x5B` - Request rejected or failed - Will be raised in the event of a failure for any
+reason other than the two mentioned next.
+* `0x5C` - request rejected because SOCKS server cannot connect to identd on the client -
+The Socks server had tried an ident lookup on your computer and has failed. In this
+case you should run an identd server and/or configure your firewall to allow incoming
+connections to local port 113 from the remote server.
+* `0x5D` - request rejected because the client program and identd report different user-ids -
+The Socks server had performed an ident lookup on your computer and has received a
+different userid than the one you have provided. Change your userid (through the
+username parameter of the set_proxy method) to match and try again.
+
+class `HTTPError` - This will be raised for HTTP errors. The message will contain
+the HTTP status code and provided error message.
+ +After establishing the connection, the object behaves like a standard socket. +Methods like `makefile()` and `settimeout()` should behave just like regular sockets. +Call the `close()` method to close the connection. + +In addition to the `socksocket` class, an additional function worth mentioning is the +`set_default_proxy` function. The parameters are the same as the `set_proxy` method. +This function will set default proxy settings for newly created `socksocket` objects, +in which the proxy settings haven't been changed via the `set_proxy` method. +This is quite useful if you wish to force 3rd party modules to use a SOCKS proxy, +by overriding the socket object. +For example: + + >>> socks.set_default_proxy(socks.SOCKS5, "socks.example.com") + >>> socket.socket = socks.socksocket + >>> urllib.urlopen("http://www.sourceforge.net/") + + +PROBLEMS +--------- + +Please open a GitHub issue at https://github.com/Anorov/PySocks diff --git a/src/lib/PySocks/__init__.py b/src/lib/PySocks/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/lib/PySocks/setup.py b/src/lib/PySocks/setup.py new file mode 100644 index 00000000..9db0f3d2 --- /dev/null +++ b/src/lib/PySocks/setup.py @@ -0,0 +1,17 @@ +#!/usr/bin/env python +from distutils.core import setup + +VERSION = "1.5.3" + +setup( + name = "PySocks", + version = VERSION, + description = "A Python SOCKS client module. See https://github.com/Anorov/PySocks for more information.", + url = "https://github.com/Anorov/PySocks", + license = "BSD", + author = "Anorov", + author_email = "anorov.vorona@gmail.com", + keywords = ["socks", "proxy"], + py_modules=["socks", "sockshandler"] +) + diff --git a/src/lib/PySocks/socks.py b/src/lib/PySocks/socks.py new file mode 100644 index 00000000..ad1e9780 --- /dev/null +++ b/src/lib/PySocks/socks.py @@ -0,0 +1,699 @@ +""" +SocksiPy - Python SOCKS module. +Version 1.5.3 + +Copyright 2006 Dan-Haim. All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +3. Neither the name of Dan Haim nor the names of his contributors may be used + to endorse or promote products derived from this software without specific + prior written permission. + +THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO +EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA +OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT +OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMANGE. + + +This module provides a standard socket-like interface for Python +for tunneling connections through SOCKS proxies. 
+ +=============================================================================== + +Minor modifications made by Christopher Gilbert (http://motomastyle.com/) +for use in PyLoris (http://pyloris.sourceforge.net/) + +Minor modifications made by Mario Vilas (http://breakingcode.wordpress.com/) +mainly to merge bug fixes found in Sourceforge + +Modifications made by Anorov (https://github.com/Anorov) +-Forked and renamed to PySocks +-Fixed issue with HTTP proxy failure checking (same bug that was in the old ___recvall() method) +-Included SocksiPyHandler (sockshandler.py), to be used as a urllib2 handler, + courtesy of e000 (https://github.com/e000): https://gist.github.com/869791#file_socksipyhandler.py +-Re-styled code to make it readable + -Aliased PROXY_TYPE_SOCKS5 -> SOCKS5 etc. + -Improved exception handling and output + -Removed irritating use of sequence indexes, replaced with tuple unpacked variables + -Fixed up Python 3 bytestring handling - chr(0x03).encode() -> b"\x03" + -Other general fixes +-Added clarification that the HTTP proxy connection method only supports CONNECT-style tunneling HTTP proxies +-Various small bug fixes +""" + +__version__ = "1.5.3" + +import socket +import struct +from errno import EOPNOTSUPP, EINVAL, EAGAIN +from io import BytesIO +from os import SEEK_CUR +from collections import Callable + +PROXY_TYPE_SOCKS4 = SOCKS4 = 1 +PROXY_TYPE_SOCKS5 = SOCKS5 = 2 +PROXY_TYPE_HTTP = HTTP = 3 + +PROXY_TYPES = {"SOCKS4": SOCKS4, "SOCKS5": SOCKS5, "HTTP": HTTP} +PRINTABLE_PROXY_TYPES = dict(zip(PROXY_TYPES.values(), PROXY_TYPES.keys())) + +_orgsocket = _orig_socket = socket.socket + +class ProxyError(IOError): + """ + socket_err contains original socket.error exception. 
+ """ + def __init__(self, msg, socket_err=None): + self.msg = msg + self.socket_err = socket_err + + if socket_err: + self.msg += ": {0}".format(socket_err) + + def __str__(self): + return self.msg + +class GeneralProxyError(ProxyError): pass +class ProxyConnectionError(ProxyError): pass +class SOCKS5AuthError(ProxyError): pass +class SOCKS5Error(ProxyError): pass +class SOCKS4Error(ProxyError): pass +class HTTPError(ProxyError): pass + +SOCKS4_ERRORS = { 0x5B: "Request rejected or failed", + 0x5C: "Request rejected because SOCKS server cannot connect to identd on the client", + 0x5D: "Request rejected because the client program and identd report different user-ids" + } + +SOCKS5_ERRORS = { 0x01: "General SOCKS server failure", + 0x02: "Connection not allowed by ruleset", + 0x03: "Network unreachable", + 0x04: "Host unreachable", + 0x05: "Connection refused", + 0x06: "TTL expired", + 0x07: "Command not supported, or protocol error", + 0x08: "Address type not supported" + } + +DEFAULT_PORTS = { SOCKS4: 1080, + SOCKS5: 1080, + HTTP: 8080 + } + +def set_default_proxy(proxy_type=None, addr=None, port=None, rdns=True, username=None, password=None): + """ + set_default_proxy(proxy_type, addr[, port[, rdns[, username, password]]]) + + Sets a default proxy which all further socksocket objects will use, + unless explicitly changed. All parameters are as for socket.set_proxy(). + """ + socksocket.default_proxy = (proxy_type, addr, port, rdns, + username.encode() if username else None, + password.encode() if password else None) + +setdefaultproxy = set_default_proxy + +def get_default_proxy(): + """ + Returns the default proxy, set by set_default_proxy. + """ + return socksocket.default_proxy + +getdefaultproxy = get_default_proxy + +def wrap_module(module): + """ + Attempts to replace a module's socket library with a SOCKS socket. Must set + a default proxy using set_default_proxy(...) first. 
+ This will only work on modules that import socket directly into the namespace; + most of the Python Standard Library falls into this category. + """ + if socksocket.default_proxy: + module.socket.socket = socksocket + else: + raise GeneralProxyError("No default proxy specified") + +wrapmodule = wrap_module + +def create_connection(dest_pair, proxy_type=None, proxy_addr=None, + proxy_port=None, proxy_username=None, + proxy_password=None, timeout=None, + source_address=None): + """create_connection(dest_pair, *[, timeout], **proxy_args) -> socket object + + Like socket.create_connection(), but connects to proxy + before returning the socket object. + + dest_pair - 2-tuple of (IP/hostname, port). + **proxy_args - Same args passed to socksocket.set_proxy() if present. + timeout - Optional socket timeout value, in seconds. + source_address - tuple (host, port) for the socket to bind to as its source + address before connecting (only for compatibility) + """ + sock = socksocket() + if isinstance(timeout, (int, float)): + sock.settimeout(timeout) + if proxy_type is not None: + sock.set_proxy(proxy_type, proxy_addr, proxy_port, + proxy_username, proxy_password) + sock.connect(dest_pair) + return sock + +class _BaseSocket(socket.socket): + """Allows Python 2's "delegated" methods such as send() to be overridden + """ + def __init__(self, *pos, **kw): + _orig_socket.__init__(self, *pos, **kw) + + self._savedmethods = dict() + for name in self._savenames: + self._savedmethods[name] = getattr(self, name) + delattr(self, name) # Allows normal overriding mechanism to work + + _savenames = list() + +def _makemethod(name): + return lambda self, *pos, **kw: self._savedmethods[name](*pos, **kw) +for name in ("sendto", "send", "recvfrom", "recv"): + method = getattr(_BaseSocket, name, None) + + # Determine if the method is not defined the usual way + # as a function in the class. + # Python 2 uses __slots__, so there are descriptors for each method, + # but they are not functions. 
+ if not isinstance(method, Callable): + _BaseSocket._savenames.append(name) + setattr(_BaseSocket, name, _makemethod(name)) + +class socksocket(_BaseSocket): + """socksocket([family[, type[, proto]]]) -> socket object + + Open a SOCKS enabled socket. The parameters are the same as + those of the standard socket init. In order for SOCKS to work, + you must specify family=AF_INET and proto=0. + The "type" argument must be either SOCK_STREAM or SOCK_DGRAM. + """ + + default_proxy = None + + def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None): + if type not in (socket.SOCK_STREAM, socket.SOCK_DGRAM): + msg = "Socket type must be stream or datagram, not {!r}" + raise ValueError(msg.format(type)) + + _BaseSocket.__init__(self, family, type, proto, _sock) + self._proxyconn = None # TCP connection to keep UDP relay alive + + if self.default_proxy: + self.proxy = self.default_proxy + else: + self.proxy = (None, None, None, None, None, None) + self.proxy_sockname = None + self.proxy_peername = None + + def _readall(self, file, count): + """ + Receive EXACTLY the number of bytes requested from the file object. + Blocks until the required number of bytes have been received. + """ + data = b"" + while len(data) < count: + d = file.read(count - len(data)) + if not d: + raise GeneralProxyError("Connection closed unexpectedly") + data += d + return data + + def set_proxy(self, proxy_type=None, addr=None, port=None, rdns=True, username=None, password=None): + """set_proxy(proxy_type, addr[, port[, rdns[, username[, password]]]]) + Sets the proxy to be used. + + proxy_type - The type of the proxy to be used. Three types + are supported: PROXY_TYPE_SOCKS4 (including socks4a), + PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP + addr - The address of the server (IP or DNS). + port - The port of the server. Defaults to 1080 for SOCKS + servers and 8080 for HTTP proxy servers. 
+ rdns - Should DNS queries be performed on the remote side + (rather than the local side). The default is True. + Note: This has no effect with SOCKS4 servers. + username - Username to authenticate with to the server. + The default is no authentication. + password - Password to authenticate with to the server. + Only relevant when username is also provided. + """ + self.proxy = (proxy_type, addr, port, rdns, + username.encode() if username else None, + password.encode() if password else None) + + setproxy = set_proxy + + def bind(self, *pos, **kw): + """ + Implements proxy connection for UDP sockets, + which happens during the bind() phase. + """ + proxy_type, proxy_addr, proxy_port, rdns, username, password = self.proxy + if not proxy_type or self.type != socket.SOCK_DGRAM: + return _orig_socket.bind(self, *pos, **kw) + + if self._proxyconn: + raise socket.error(EINVAL, "Socket already bound to an address") + if proxy_type != SOCKS5: + msg = "UDP only supported by SOCKS5 proxy type" + raise socket.error(EOPNOTSUPP, msg) + _BaseSocket.bind(self, *pos, **kw) + + # Need to specify actual local port because + # some relays drop packets if a port of zero is specified. + # Avoid specifying host address in case of NAT though. 
+ _, port = self.getsockname() + dst = ("0", port) + + self._proxyconn = _orig_socket() + proxy = self._proxy_addr() + self._proxyconn.connect(proxy) + + UDP_ASSOCIATE = b"\x03" + _, relay = self._SOCKS5_request(self._proxyconn, UDP_ASSOCIATE, dst) + + # The relay is most likely on the same host as the SOCKS proxy, + # but some proxies return a private IP address (10.x.y.z) + host, _ = proxy + _, port = relay + _BaseSocket.connect(self, (host, port)) + self.proxy_sockname = ("0.0.0.0", 0) # Unknown + + def sendto(self, bytes, *args, **kwargs): + if self.type != socket.SOCK_DGRAM: + return _BaseSocket.sendto(self, bytes, *args, **kwargs) + if not self._proxyconn: + self.bind(("", 0)) + + address = args[-1] + flags = args[:-1] + + header = BytesIO() + RSV = b"\x00\x00" + header.write(RSV) + STANDALONE = b"\x00" + header.write(STANDALONE) + self._write_SOCKS5_address(address, header) + + sent = _BaseSocket.send(self, header.getvalue() + bytes, *flags, **kwargs) + return sent - header.tell() + + def send(self, bytes, flags=0, **kwargs): + if self.type == socket.SOCK_DGRAM: + return self.sendto(bytes, flags, self.proxy_peername, **kwargs) + else: + return _BaseSocket.send(self, bytes, flags, **kwargs) + + def recvfrom(self, bufsize, flags=0): + if self.type != socket.SOCK_DGRAM: + return _BaseSocket.recvfrom(self, bufsize, flags) + if not self._proxyconn: + self.bind(("", 0)) + + buf = BytesIO(_BaseSocket.recv(self, bufsize, flags)) + buf.seek(+2, SEEK_CUR) + frag = buf.read(1) + if ord(frag): + raise NotImplementedError("Received UDP packet fragment") + fromhost, fromport = self._read_SOCKS5_address(buf) + + if self.proxy_peername: + peerhost, peerport = self.proxy_peername + if fromhost != peerhost or peerport not in (0, fromport): + raise socket.error(EAGAIN, "Packet filtered") + + return (buf.read(), (fromhost, fromport)) + + def recv(self, *pos, **kw): + bytes, _ = self.recvfrom(*pos, **kw) + return bytes + + def close(self): + if self._proxyconn: + 
self._proxyconn.close() + return _BaseSocket.close(self) + + def get_proxy_sockname(self): + """ + Returns the bound IP address and port number at the proxy. + """ + return self.proxy_sockname + + getproxysockname = get_proxy_sockname + + def get_proxy_peername(self): + """ + Returns the IP and port number of the proxy. + """ + return _BaseSocket.getpeername(self) + + getproxypeername = get_proxy_peername + + def get_peername(self): + """ + Returns the IP address and port number of the destination + machine (note: get_proxy_peername returns the proxy) + """ + return self.proxy_peername + + getpeername = get_peername + + def _negotiate_SOCKS5(self, *dest_addr): + """ + Negotiates a stream connection through a SOCKS5 server. + """ + CONNECT = b"\x01" + self.proxy_peername, self.proxy_sockname = self._SOCKS5_request(self, + CONNECT, dest_addr) + + def _SOCKS5_request(self, conn, cmd, dst): + """ + Send SOCKS5 request with given command (CMD field) and + address (DST field). Returns resolved DST address that was used. + """ + proxy_type, addr, port, rdns, username, password = self.proxy + + writer = conn.makefile("wb") + reader = conn.makefile("rb", 0) # buffering=0 renamed in Python 3 + try: + # First we'll send the authentication packages we support. + if username and password: + # The username/password details were supplied to the + # set_proxy method so we support the USERNAME/PASSWORD + # authentication (in addition to the standard none). + writer.write(b"\x05\x02\x00\x02") + else: + # No username/password were entered, therefore we + # only support connections with no authentication. 
+ writer.write(b"\x05\x01\x00") + + # We'll receive the server's response to determine which + # method was selected + writer.flush() + chosen_auth = self._readall(reader, 2) + + if chosen_auth[0:1] != b"\x05": + # Note: string[i:i+1] is used because indexing of a bytestring + # via bytestring[i] yields an integer in Python 3 + raise GeneralProxyError("SOCKS5 proxy server sent invalid data") + + # Check the chosen authentication method + + if chosen_auth[1:2] == b"\x02": + # Okay, we need to perform a basic username/password + # authentication. + writer.write(b"\x01" + chr(len(username)).encode() + + username + + chr(len(password)).encode() + + password) + writer.flush() + auth_status = self._readall(reader, 2) + if auth_status[0:1] != b"\x01": + # Bad response + raise GeneralProxyError("SOCKS5 proxy server sent invalid data") + if auth_status[1:2] != b"\x00": + # Authentication failed + raise SOCKS5AuthError("SOCKS5 authentication failed") + + # Otherwise, authentication succeeded + + # No authentication is required if 0x00 + elif chosen_auth[1:2] != b"\x00": + # Reaching here is always bad + if chosen_auth[1:2] == b"\xFF": + raise SOCKS5AuthError("All offered SOCKS5 authentication methods were rejected") + else: + raise GeneralProxyError("SOCKS5 proxy server sent invalid data") + + # Now we can request the actual connection + writer.write(b"\x05" + cmd + b"\x00") + resolved = self._write_SOCKS5_address(dst, writer) + writer.flush() + + # Get the response + resp = self._readall(reader, 3) + if resp[0:1] != b"\x05": + raise GeneralProxyError("SOCKS5 proxy server sent invalid data") + + status = ord(resp[1:2]) + if status != 0x00: + # Connection failed: server returned an error + error = SOCKS5_ERRORS.get(status, "Unknown error") + raise SOCKS5Error("{0:#04x}: {1}".format(status, error)) + + # Get the bound address/port + bnd = self._read_SOCKS5_address(reader) + return (resolved, bnd) + finally: + reader.close() + writer.close() + + def _write_SOCKS5_address(self, 
addr, file): + """ + Return the host and port packed for the SOCKS5 protocol, + and the resolved address as a tuple object. + """ + host, port = addr + proxy_type, _, _, rdns, username, password = self.proxy + + # If the given destination address is an IP address, we'll + # use the IPv4 address request even if remote resolving was specified. + try: + addr_bytes = socket.inet_aton(host) + file.write(b"\x01" + addr_bytes) + host = socket.inet_ntoa(addr_bytes) + except socket.error: + # Well it's not an IP number, so it's probably a DNS name. + if rdns: + # Resolve remotely + host_bytes = host.encode('idna') + file.write(b"\x03" + chr(len(host_bytes)).encode() + host_bytes) + else: + # Resolve locally + addr_bytes = socket.inet_aton(socket.gethostbyname(host)) + file.write(b"\x01" + addr_bytes) + host = socket.inet_ntoa(addr_bytes) + + file.write(struct.pack(">H", port)) + return host, port + + def _read_SOCKS5_address(self, file): + atyp = self._readall(file, 1) + if atyp == b"\x01": + addr = socket.inet_ntoa(self._readall(file, 4)) + elif atyp == b"\x03": + length = self._readall(file, 1) + addr = self._readall(file, ord(length)) + else: + raise GeneralProxyError("SOCKS5 proxy server sent invalid data") + + port = struct.unpack(">H", self._readall(file, 2))[0] + return addr, port + + def _negotiate_SOCKS4(self, dest_addr, dest_port): + """ + Negotiates a connection through a SOCKS4 server. + """ + proxy_type, addr, port, rdns, username, password = self.proxy + + writer = self.makefile("wb") + reader = self.makefile("rb", 0) # buffering=0 renamed in Python 3 + try: + # Check if the destination address provided is an IP address + remote_resolve = False + try: + addr_bytes = socket.inet_aton(dest_addr) + except socket.error: + # It's a DNS name. Check where it should be resolved. 
+ if rdns: + addr_bytes = b"\x00\x00\x00\x01" + remote_resolve = True + else: + addr_bytes = socket.inet_aton(socket.gethostbyname(dest_addr)) + + # Construct the request packet + writer.write(struct.pack(">BBH", 0x04, 0x01, dest_port)) + writer.write(addr_bytes) + + # The username parameter is considered userid for SOCKS4 + if username: + writer.write(username) + writer.write(b"\x00") + + # DNS name if remote resolving is required + # NOTE: This is actually an extension to the SOCKS4 protocol + # called SOCKS4A and may not be supported in all cases. + if remote_resolve: + writer.write(dest_addr.encode('idna') + b"\x00") + writer.flush() + + # Get the response from the server + resp = self._readall(reader, 8) + if resp[0:1] != b"\x00": + # Bad data + raise GeneralProxyError("SOCKS4 proxy server sent invalid data") + + status = ord(resp[1:2]) + if status != 0x5A: + # Connection failed: server returned an error + error = SOCKS4_ERRORS.get(status, "Unknown error") + raise SOCKS4Error("{0:#04x}: {1}".format(status, error)) + + # Get the bound address/port + self.proxy_sockname = (socket.inet_ntoa(resp[4:]), struct.unpack(">H", resp[2:4])[0]) + if remote_resolve: + self.proxy_peername = socket.inet_ntoa(addr_bytes), dest_port + else: + self.proxy_peername = dest_addr, dest_port + finally: + reader.close() + writer.close() + + def _negotiate_HTTP(self, dest_addr, dest_port): + """ + Negotiates a connection through an HTTP server. + NOTE: This currently only supports HTTP CONNECT-style proxies. 
+ """ + proxy_type, addr, port, rdns, username, password = self.proxy + + # If we need to resolve locally, we do this now + addr = dest_addr if rdns else socket.gethostbyname(dest_addr) + + self.sendall(b"CONNECT " + addr.encode('idna') + b":" + str(dest_port).encode() + + b" HTTP/1.1\r\n" + b"Host: " + dest_addr.encode('idna') + b"\r\n\r\n") + + # We just need the first line to check if the connection was successful + fobj = self.makefile() + status_line = fobj.readline() + fobj.close() + + if not status_line: + raise GeneralProxyError("Connection closed unexpectedly") + + try: + proto, status_code, status_msg = status_line.split(" ", 2) + except ValueError: + raise GeneralProxyError("HTTP proxy server sent invalid response") + + if not proto.startswith("HTTP/"): + raise GeneralProxyError("Proxy server does not appear to be an HTTP proxy") + + try: + status_code = int(status_code) + except ValueError: + raise HTTPError("HTTP proxy server did not return a valid HTTP status") + + if status_code != 200: + error = "{0}: {1}".format(status_code, status_msg) + if status_code in (400, 403, 405): + # It's likely that the HTTP proxy server does not support the CONNECT tunneling method + error += ("\n[*] Note: The HTTP proxy server may not be supported by PySocks" + " (must be a CONNECT tunnel proxy)") + raise HTTPError(error) + + self.proxy_sockname = (b"0.0.0.0", 0) + self.proxy_peername = addr, dest_port + + _proxy_negotiators = { + SOCKS4: _negotiate_SOCKS4, + SOCKS5: _negotiate_SOCKS5, + HTTP: _negotiate_HTTP + } + + + def connect(self, dest_pair): + """ + Connects to the specified destination through a proxy. + Uses the same API as socket's connect(). + To select the proxy server, use set_proxy(). + + dest_pair - 2-tuple of (IP/hostname, port). 
+ """ + dest_addr, dest_port = dest_pair + + if self.type == socket.SOCK_DGRAM: + if not self._proxyconn: + self.bind(("", 0)) + dest_addr = socket.gethostbyname(dest_addr) + + # If the host address is INADDR_ANY or similar, reset the peer + # address so that packets are received from any peer + if dest_addr == "0.0.0.0" and not dest_port: + self.proxy_peername = None + else: + self.proxy_peername = (dest_addr, dest_port) + return + + proxy_type, proxy_addr, proxy_port, rdns, username, password = self.proxy + + # Do a minimal input check first + if (not isinstance(dest_pair, (list, tuple)) + or len(dest_pair) != 2 + or not dest_addr + or not isinstance(dest_port, int)): + raise GeneralProxyError("Invalid destination-connection (host, port) pair") + + + if proxy_type is None: + # Treat like regular socket object + self.proxy_peername = dest_pair + _BaseSocket.connect(self, (dest_addr, dest_port)) + return + + proxy_addr = self._proxy_addr() + + try: + # Initial connection to proxy server + _BaseSocket.connect(self, proxy_addr) + + except socket.error as error: + # Error while connecting to proxy + self.close() + proxy_addr, proxy_port = proxy_addr + proxy_server = "{0}:{1}".format(proxy_addr, proxy_port) + printable_type = PRINTABLE_PROXY_TYPES[proxy_type] + + msg = "Error connecting to {0} proxy {1}".format(printable_type, + proxy_server) + raise ProxyConnectionError(msg, error) + + else: + # Connected to proxy server, now negotiate + try: + # Calls negotiate_{SOCKS4, SOCKS5, HTTP} + negotiate = self._proxy_negotiators[proxy_type] + negotiate(self, dest_addr, dest_port) + except socket.error as error: + # Wrap socket errors + self.close() + raise GeneralProxyError("Socket error", error) + except ProxyError: + # Protocol error while negotiating with proxy + self.close() + raise + + def _proxy_addr(self): + """ + Return proxy address to connect to as tuple object + """ + proxy_type, proxy_addr, proxy_port, rdns, username, password = self.proxy + proxy_port = 
proxy_port or DEFAULT_PORTS.get(proxy_type) + if not proxy_port: + raise GeneralProxyError("Invalid proxy type") + return proxy_addr, proxy_port diff --git a/src/lib/PySocks/sockshandler.py b/src/lib/PySocks/sockshandler.py new file mode 100644 index 00000000..26c83439 --- /dev/null +++ b/src/lib/PySocks/sockshandler.py @@ -0,0 +1,79 @@ +#!/usr/bin/env python +""" +SocksiPy + urllib2 handler + +version: 0.3 +author: e + +This module provides a Handler which you can use with urllib2 to allow it to tunnel your connection through a socks.sockssocket socket, with out monkey patching the original socket... +""" +import ssl + +try: + import urllib2 + import httplib +except ImportError: # Python 3 + import urllib.request as urllib2 + import http.client as httplib + +import socks # $ pip install PySocks + +def merge_dict(a, b): + d = a.copy() + d.update(b) + return d + +class SocksiPyConnection(httplib.HTTPConnection): + def __init__(self, proxytype, proxyaddr, proxyport=None, rdns=True, username=None, password=None, *args, **kwargs): + self.proxyargs = (proxytype, proxyaddr, proxyport, rdns, username, password) + httplib.HTTPConnection.__init__(self, *args, **kwargs) + + def connect(self): + self.sock = socks.socksocket() + self.sock.setproxy(*self.proxyargs) + if type(self.timeout) in (int, float): + self.sock.settimeout(self.timeout) + self.sock.connect((self.host, self.port)) + +class SocksiPyConnectionS(httplib.HTTPSConnection): + def __init__(self, proxytype, proxyaddr, proxyport=None, rdns=True, username=None, password=None, *args, **kwargs): + self.proxyargs = (proxytype, proxyaddr, proxyport, rdns, username, password) + httplib.HTTPSConnection.__init__(self, *args, **kwargs) + + def connect(self): + sock = socks.socksocket() + sock.setproxy(*self.proxyargs) + if type(self.timeout) in (int, float): + sock.settimeout(self.timeout) + sock.connect((self.host, self.port)) + self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file) + +class 
SocksiPyHandler(urllib2.HTTPHandler, urllib2.HTTPSHandler): + def __init__(self, *args, **kwargs): + self.args = args + self.kw = kwargs + urllib2.HTTPHandler.__init__(self) + + def http_open(self, req): + def build(host, port=None, timeout=0, **kwargs): + kw = merge_dict(self.kw, kwargs) + conn = SocksiPyConnection(*self.args, host=host, port=port, timeout=timeout, **kw) + return conn + return self.do_open(build, req) + + def https_open(self, req): + def build(host, port=None, timeout=0, **kwargs): + kw = merge_dict(self.kw, kwargs) + conn = SocksiPyConnectionS(*self.args, host=host, port=port, timeout=timeout, **kw) + return conn + return self.do_open(build, req) + +if __name__ == "__main__": + import sys + try: + port = int(sys.argv[1]) + except (ValueError, IndexError): + port = 9050 + opener = urllib2.build_opener(SocksiPyHandler(socks.PROXY_TYPE_SOCKS5, "localhost", port)) + print("HTTP: " + opener.open("http://httpbin.org/ip").read().decode()) + print("HTTPS: " + opener.open("https://httpbin.org/ip").read().decode()) diff --git a/src/lib/PySocks/test/README b/src/lib/PySocks/test/README new file mode 100644 index 00000000..e08608ef --- /dev/null +++ b/src/lib/PySocks/test/README @@ -0,0 +1,5 @@ +Very rudimentary tests for Python 2 and Python 3. + +Requirements: tornado, twisted (available through pip) + +./test.sh diff --git a/src/lib/PySocks/test/httpproxy.py b/src/lib/PySocks/test/httpproxy.py new file mode 100644 index 00000000..df0ad031 --- /dev/null +++ b/src/lib/PySocks/test/httpproxy.py @@ -0,0 +1,137 @@ +#!/usr/bin/env python +# +# Simple asynchronous HTTP proxy with tunnelling (CONNECT). 
+# +# GET/POST proxying based on +# http://groups.google.com/group/python-tornado/msg/7bea08e7a049cf26 +# +# Copyright (C) 2012 Senko Rasic +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+ +import sys +import socket + +import tornado.httpserver +import tornado.ioloop +import tornado.iostream +import tornado.web +import tornado.httpclient + +__all__ = ['ProxyHandler', 'run_proxy'] + + +class ProxyHandler(tornado.web.RequestHandler): + SUPPORTED_METHODS = ['GET', 'POST', 'CONNECT'] + + @tornado.web.asynchronous + def get(self): + + def handle_response(response): + if response.error and not isinstance(response.error, + tornado.httpclient.HTTPError): + self.set_status(500) + self.write('Internal server error:\n' + str(response.error)) + self.finish() + else: + self.set_status(response.code) + for header in ('Date', 'Cache-Control', 'Server', + 'Content-Type', 'Location'): + v = response.headers.get(header) + if v: + self.set_header(header, v) + if response.body: + self.write(response.body) + self.finish() + + req = tornado.httpclient.HTTPRequest(url=self.request.uri, + method=self.request.method, body=self.request.body, + headers=self.request.headers, follow_redirects=False, + allow_nonstandard_methods=True) + + client = tornado.httpclient.AsyncHTTPClient() + try: + client.fetch(req, handle_response) + except tornado.httpclient.HTTPError as e: + if hasattr(e, 'response') and e.response: + self.handle_response(e.response) + else: + self.set_status(500) + self.write('Internal server error:\n' + str(e)) + self.finish() + + @tornado.web.asynchronous + def post(self): + return self.get() + + @tornado.web.asynchronous + def connect(self): + host, port = self.request.uri.split(':') + client = self.request.connection.stream + + def read_from_client(data): + upstream.write(data) + + def read_from_upstream(data): + client.write(data) + + def client_close(data=None): + if upstream.closed(): + return + if data: + upstream.write(data) + upstream.close() + + def upstream_close(data=None): + if client.closed(): + return + if data: + client.write(data) + client.close() + + def start_tunnel(): + client.read_until_close(client_close, read_from_client) + 
upstream.read_until_close(upstream_close, read_from_upstream) + client.write(b'HTTP/1.0 200 Connection established\r\n\r\n') + + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) + upstream = tornado.iostream.IOStream(s) + upstream.connect((host, int(port)), start_tunnel) + + +def run_proxy(port=8080, start_ioloop=True): + """ + Run proxy on the specified port. If start_ioloop is True (default), + the tornado IOLoop will be started immediately. + """ + app = tornado.web.Application([ + (r'.*', ProxyHandler), + ]) + app.listen(port, address="127.0.0.1") + ioloop = tornado.ioloop.IOLoop.instance() + if start_ioloop: + ioloop.start() + +if __name__ == '__main__': + port = 8081 + if len(sys.argv) > 1: + port = int(sys.argv[1]) + + print ("Running HTTP proxy server") + run_proxy(port) diff --git a/src/lib/PySocks/test/mocks b/src/lib/PySocks/test/mocks new file mode 100644 index 00000000..5299a3f4 Binary files /dev/null and b/src/lib/PySocks/test/mocks differ diff --git a/src/lib/PySocks/test/mocks.conf b/src/lib/PySocks/test/mocks.conf new file mode 100644 index 00000000..ab5ef590 --- /dev/null +++ b/src/lib/PySocks/test/mocks.conf @@ -0,0 +1,104 @@ +################################################# +# # +# Sample configuration file for MOCKS 0.0.2 # +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # +# # +# I recommend reading the examples in this file # +# and then extending it to suite your needs. 
# +# # +################################################# + + + +######################### +# +# General daemon config +# ~~~~~~~~~~~~~~~~~~~~~ +# +######################### + +PORT = 1081 # Port MOCKS is to listen to +MOCKS_ADDR = 127.0.0.1 # IP adress MOCKS is to bind to +LOG_FILE = mocks.log # MOCKS log file +PID_FILE = mocks.pid # File holding MOCKS's process ID +BUFFER_SIZE = 65536 # Traffic buffer size in bytes +BACKLOG = 5 # Backlog for listen() +NEGOTIATION_TIMEOUT = 5 +CONNECTION_IDLE_TIMEOUT = 300 +BIND_TIMEOUT = 30 +SHUTDOWN_TIMEOUT = 3 +MAX_CONNECTIONS = 50 + + + +########################################################################## +# +# Client filter config +# ~~~~~~~~~~~~~~~~~~~~ +# +# Client filtering means sorting out which clients are allowed +# connection and which are not. This is basically done like this: +# MOCKS has a default behaviour regarding filtering client +# connections. This behaviour is called the 'policy' and can either +# be to ALLOW or to DENY the connection. After setting the policy +# you can specify a list of exceptions. The action MOCKS takes +# for a client matching any of these exceptions is the opposite +# of the policy (that is, if the policy is set to ALLOW the exceptions +# are denied and if the policy is set to DENY the exceptions are allowed). +# An exception is specified in the form ip_address/mask, where mask +# is optional and is an integer ranging from 0 to 32 identifying the +# number of common heading bits that ip_address and the client's IP +# address must have in order to yield a match. If mask is missing, +# 32 will be assumed. For instance, 192.168.1.0/24 will match any IP +# ranging from 192.168.1.1 to 192.168.1.255. +# +# Let's take two examples, one for each type of policy. Let's say we +# only want to allow IPs 10.12.0.0 through 10.12.255.255, 172.23.2.5 and +# 192.168.52.26 to use MOCKS. 
What we have to to is this: +# +# FILTER_POLICY = DENY +# FILTER_EXCEPTION = 10.12.0.0/16 +# FILTER_EXCEPTION = 172.23.2.5 # implied /32 +# FILTER_EXCEPTION = 192.168.52.26 # implied /32 +# +# Now, let's say this is a public proxy server, but for some reason +# you don't want to let any IP ranging from 192.168.1.1 to 192.168.1.255 +# and neither 10.2.5.13 to connect to it: +# +# FILTER_POLICY = ALLOW +# FILTER_EXCEPTION = 192.168.1.0/24 +# FILTER_EXCEPTION = 10.2.5.13 +# +########################################################################### + +FILTER_POLICY = ALLOW + + + +############################################################################# +# +# Upstream proxy config +# ~~~~~~~~~~~~~~~~~~~~~ +# +# You can choose to further relay traffic through another proxy server. +# MOCKS supports upstream HTTP CONNECT, SOCKS4 and SOCKS5 proxies. You +# must specify the proxy type (one of HTTPCONNECT, SOCKS4 or SOCKS5), the +# proxy address and the proxy port. Optionally you can specify an user +# name and a password used to authenicate to the upstream proxy. This is +# pretty straight forward, so let's just take an example. Let's say you +# want to use the HTTP CONNECT server at httpconnectproxy.com, on port 3128, +# using the username 'foo' and the password 'bar'. 
You do it like this: +# +# UP_PROXY_TYPE = HTTPCONNECT +# UP_PROXY_ADDR = httpconnectproxy.com +# UP_PROXY_PORT = 3128 +# UP_PROXY_USER = foo # These two can be missing if you +# UP_PROXY_PASSWD = bar # are not required to authenticate +# +############################################################################# + +# UP_PROXY_TYPE = HTTPCONNECT +# UP_PROXY_ADDR = 192.168.1.12 +# UP_PROXY_PORT = 3128 + + diff --git a/src/lib/PySocks/test/socks4server.py b/src/lib/PySocks/test/socks4server.py new file mode 100644 index 00000000..05a54b93 --- /dev/null +++ b/src/lib/PySocks/test/socks4server.py @@ -0,0 +1,14 @@ +#!/usr/bin/env python +from twisted.internet import reactor +from twisted.protocols.socks import SOCKSv4Factory + +def run_proxy(): + reactor.listenTCP(1080, SOCKSv4Factory("/dev/null"), interface="127.0.0.1") + try: + reactor.run() + except (KeyboardInterrupt, SystemExit): + reactor.stop() + +if __name__ == "__main__": + print "Running SOCKS4 proxy server" + run_proxy() diff --git a/src/lib/PySocks/test/sockstest.py b/src/lib/PySocks/test/sockstest.py new file mode 100644 index 00000000..526cb3fc --- /dev/null +++ b/src/lib/PySocks/test/sockstest.py @@ -0,0 +1,174 @@ +import sys +sys.path.append("..") +import socks +import socket + +PY3K = sys.version_info[0] == 3 + +if PY3K: + import urllib.request as urllib2 +else: + import sockshandler + import urllib2 + +def raw_HTTP_request(): + req = "GET /ip HTTP/1.1\r\n" + req += "Host: ifconfig.me\r\n" + req += "User-Agent: Mozilla\r\n" + req += "Accept: text/html\r\n" + req += "\r\n" + return req.encode() + +def socket_HTTP_test(): + s = socks.socksocket() + s.set_proxy(socks.HTTP, "127.0.0.1", 8081) + s.connect(("ifconfig.me", 80)) + s.sendall(raw_HTTP_request()) + status = s.recv(2048).splitlines()[0] + assert status.startswith(b"HTTP/1.1 200") + +def socket_SOCKS4_test(): + s = socks.socksocket() + s.set_proxy(socks.SOCKS4, "127.0.0.1", 1080) + s.connect(("ifconfig.me", 80)) + s.sendall(raw_HTTP_request()) + 
status = s.recv(2048).splitlines()[0] + assert status.startswith(b"HTTP/1.1 200") + +def socket_SOCKS5_test(): + s = socks.socksocket() + s.set_proxy(socks.SOCKS5, "127.0.0.1", 1081) + s.connect(("ifconfig.me", 80)) + s.sendall(raw_HTTP_request()) + status = s.recv(2048).splitlines()[0] + assert status.startswith(b"HTTP/1.1 200") + +def SOCKS5_connect_timeout_test(): + s = socks.socksocket() + s.settimeout(0.0001) + s.set_proxy(socks.SOCKS5, "8.8.8.8", 80) + try: + s.connect(("ifconfig.me", 80)) + except socks.ProxyConnectionError as e: + assert str(e.socket_err) == "timed out" + +def SOCKS5_timeout_test(): + s = socks.socksocket() + s.settimeout(0.0001) + s.set_proxy(socks.SOCKS5, "127.0.0.1", 1081) + try: + s.connect(("ifconfig.me", 4444)) + except socks.GeneralProxyError as e: + assert str(e.socket_err) == "timed out" + + +def socket_SOCKS5_auth_test(): + # TODO: add support for this test. Will need a better SOCKS5 server. + s = socks.socksocket() + s.set_proxy(socks.SOCKS5, "127.0.0.1", 1081, username="a", password="b") + s.connect(("ifconfig.me", 80)) + s.sendall(raw_HTTP_request()) + status = s.recv(2048).splitlines()[0] + assert status.startswith(b"HTTP/1.1 200") + +def socket_HTTP_IP_test(): + s = socks.socksocket() + s.set_proxy(socks.HTTP, "127.0.0.1", 8081) + s.connect(("133.242.129.236", 80)) + s.sendall(raw_HTTP_request()) + status = s.recv(2048).splitlines()[0] + assert status.startswith(b"HTTP/1.1 200") + +def socket_SOCKS4_IP_test(): + s = socks.socksocket() + s.set_proxy(socks.SOCKS4, "127.0.0.1", 1080) + s.connect(("133.242.129.236", 80)) + s.sendall(raw_HTTP_request()) + status = s.recv(2048).splitlines()[0] + assert status.startswith(b"HTTP/1.1 200") + +def socket_SOCKS5_IP_test(): + s = socks.socksocket() + s.set_proxy(socks.SOCKS5, "127.0.0.1", 1081) + s.connect(("133.242.129.236", 80)) + s.sendall(raw_HTTP_request()) + status = s.recv(2048).splitlines()[0] + assert status.startswith(b"HTTP/1.1 200") + +def urllib2_HTTP_test(): + 
socks.set_default_proxy(socks.HTTP, "127.0.0.1", 8081) + socks.wrap_module(urllib2) + status = urllib2.urlopen("http://ifconfig.me/ip").getcode() + assert status == 200 + +def urllib2_SOCKS5_test(): + socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", 1081) + socks.wrap_module(urllib2) + status = urllib2.urlopen("http://ifconfig.me/ip").getcode() + assert status == 200 + +def urllib2_handler_HTTP_test(): + opener = urllib2.build_opener(sockshandler.SocksiPyHandler(socks.HTTP, "127.0.0.1", 8081)) + status = opener.open("http://ifconfig.me/ip").getcode() + assert status == 200 + +def urllib2_handler_SOCKS5_test(): + opener = urllib2.build_opener(sockshandler.SocksiPyHandler(socks.SOCKS5, "127.0.0.1", 1081)) + status = opener.open("http://ifconfig.me/ip").getcode() + assert status == 200 + +def global_override_HTTP_test(): + socks.set_default_proxy(socks.HTTP, "127.0.0.1", 8081) + good = socket.socket + socket.socket = socks.socksocket + status = urllib2.urlopen("http://ifconfig.me/ip").getcode() + socket.socket = good + assert status == 200 + +def global_override_SOCKS5_test(): + default_proxy = (socks.SOCKS5, "127.0.0.1", 1081) + socks.set_default_proxy(*default_proxy) + good = socket.socket + socket.socket = socks.socksocket + status = urllib2.urlopen("http://ifconfig.me/ip").getcode() + socket.socket = good + assert status == 200 + assert socks.get_default_proxy()[1].decode() == default_proxy[1] + + +def main(): + print("Running tests...") + socket_HTTP_test() + print("1/12") + socket_SOCKS4_test() + print("2/12") + socket_SOCKS5_test() + print("3/12") + if not PY3K: + urllib2_handler_HTTP_test() + print("3.33/12") + urllib2_handler_SOCKS5_test() + print("3.66/12") + socket_HTTP_IP_test() + print("4/12") + socket_SOCKS4_IP_test() + print("5/12") + socket_SOCKS5_IP_test() + print("6/12") + SOCKS5_connect_timeout_test() + print("7/12") + SOCKS5_timeout_test() + print("8/12") + urllib2_HTTP_test() + print("9/12") + urllib2_SOCKS5_test() + print("10/12") + 
global_override_HTTP_test() + print("11/12") + global_override_SOCKS5_test() + print("12/12") + print("All tests ran successfully") + + +if __name__ == "__main__": + main() diff --git a/src/lib/PySocks/test/test.sh b/src/lib/PySocks/test/test.sh new file mode 100644 index 00000000..18479b9f --- /dev/null +++ b/src/lib/PySocks/test/test.sh @@ -0,0 +1,25 @@ +#!/bin/bash +shopt -s expand_aliases +type python2 >/dev/null 2>&1 || alias python2='python' + +echo "Starting proxy servers..." +python2 socks4server.py > /dev/null & +python2 httpproxy.py > /dev/null & +./mocks start + +sleep 2 +echo "Python 2.6 tests" +python2.6 sockstest.py +exit + +sleep 2 +echo "Python 2.7 tests" +python2.7 sockstest.py + +sleep 2 +echo "Python 3.x tests" +python3 sockstest.py + +pkill python2 > /dev/null +./mocks shutdown +echo "Finished tests" diff --git a/src/lib/bencode/__init__.py b/src/lib/bencode/__init__.py new file mode 100644 index 00000000..c484c651 --- /dev/null +++ b/src/lib/bencode/__init__.py @@ -0,0 +1,7 @@ +try: + string_type = basestring +except NameError: + string_type = str + +from .encode import encode +from .decode import decode diff --git a/src/lib/bencode/decode.py b/src/lib/bencode/decode.py new file mode 100644 index 00000000..65362490 --- /dev/null +++ b/src/lib/bencode/decode.py @@ -0,0 +1,141 @@ +import itertools +import collections + +from . import string_type + +try: + range = xrange +except NameError: + pass + +def decode(data): + ''' + Bdecodes data into Python built-in types. + ''' + + return consume(LookaheadIterator(data)) + +class LookaheadIterator(collections.Iterator): + ''' + An iterator that lets you peek at the next item. 
+ ''' + + def __init__(self, iterator): + self.iterator, self.next_iterator = itertools.tee(iter(iterator)) + + # Be one step ahead + self._advance() + + def _advance(self): + self.next_item = next(self.next_iterator, None) + + def __next__(self): + self._advance() + + return next(self.iterator) + + # Python 2 compatibility + next = __next__ + +def consume(stream): + item = stream.next_item + + if item is None: + raise ValueError('Encoding empty data is undefined') + elif item == 'i': + return consume_int(stream) + elif item == 'l': + return consume_list(stream) + elif item == 'd': + return consume_dict(stream) + elif item is not None and item[0].isdigit(): + return consume_str(stream) + else: + raise ValueError('Invalid bencode object type: ', item) + +def consume_number(stream): + result = '' + + while True: + chunk = stream.next_item + + if not chunk.isdigit(): + return result + elif result.startswith('0'): + raise ValueError('Invalid number') + + next(stream) + result += chunk + +def consume_int(stream): + if next(stream) != 'i': + raise ValueError() + + negative = stream.next_item == '-' + + if negative: + next(stream) + + result = int(consume_number(stream)) + + if negative: + result *= -1 + + if result == 0: + raise ValueError('Negative zero is not allowed') + + if next(stream) != 'e': + raise ValueError('Unterminated integer') + + return result + +def consume_str(stream): + length = int(consume_number(stream)) + + if next(stream) != ':': + raise ValueError('Malformed string') + + result = '' + + for i in range(length): + try: + result += next(stream) + except StopIteration: + raise ValueError('Invalid string length') + + return result + +def consume_list(stream): + if next(stream) != 'l': + raise ValueError() + + l = [] + + while stream.next_item != 'e': + l.append(consume(stream)) + + if next(stream) != 'e': + raise ValueError('Unterminated list') + + return l + +def consume_dict(stream): + if next(stream) != 'd': + raise ValueError() + + d = {} + + while 
stream.next_item != 'e': + key = consume(stream) + + if not isinstance(key, string_type): + raise ValueError('Dictionary keys must be strings') + + value = consume(stream) + + d[key] = value + + if next(stream) != 'e': + raise ValueError('Unterminated dictionary') + + return d diff --git a/src/lib/bencode/encode.py b/src/lib/bencode/encode.py new file mode 100644 index 00000000..be98f994 --- /dev/null +++ b/src/lib/bencode/encode.py @@ -0,0 +1,22 @@ +from . import string_type + +def encode(obj): + ''' + Bencodes the object. The object must be an instance of: str, int, list, or dict. + ''' + + if isinstance(obj, string_type): + return '{0}:{1}'.format(len(obj), obj) + elif isinstance(obj, int): + return 'i{0}e'.format(obj) + elif isinstance(obj, list): + values = ''.join([encode(o) for o in obj]) + + return 'l{0}e'.format(values) + elif isinstance(obj, dict): + items = sorted(obj.items()) + values = ''.join([encode(str(key)) + encode(value) for key, value in items]) + + return 'd{0}e'.format(values) + else: + raise TypeError('Unsupported type: {0}. Must be one of: str, int, list, dict.'.format(type(obj))) diff --git a/src/lib/bencode_open/LICENSE b/src/lib/bencode_open/LICENSE deleted file mode 100644 index f0e46d71..00000000 --- a/src/lib/bencode_open/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2019 Ivan Machugovskiy - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/src/lib/bencode_open/__init__.py b/src/lib/bencode_open/__init__.py deleted file mode 100644 index e3c783cc..00000000 --- a/src/lib/bencode_open/__init__.py +++ /dev/null @@ -1,160 +0,0 @@ -def loads(data): - if not isinstance(data, bytes): - raise TypeError("Expected 'bytes' object, got {}".format(type(data))) - - offset = 0 - - - def parseInteger(): - nonlocal offset - - offset += 1 - had_digit = False - abs_value = 0 - - sign = 1 - if data[offset] == ord("-"): - sign = -1 - offset += 1 - while offset < len(data): - if data[offset] == ord("e"): - # End of string - offset += 1 - if not had_digit: - raise ValueError("Integer without value") - break - if ord("0") <= data[offset] <= ord("9"): - abs_value = abs_value * 10 + int(chr(data[offset])) - had_digit = True - offset += 1 - else: - raise ValueError("Invalid integer") - else: - raise ValueError("Unexpected EOF, expected integer") - - if not had_digit: - raise ValueError("Empty integer") - - return sign * abs_value - - - def parseString(): - nonlocal offset - - length = int(chr(data[offset])) - offset += 1 - - while offset < len(data): - if data[offset] == ord(":"): - offset += 1 - break - if ord("0") <= data[offset] <= ord("9"): - length = length * 10 + int(chr(data[offset])) - offset += 1 - else: - raise ValueError("Invalid string length") - else: - raise ValueError("Unexpected EOF, expected string contents") - - if offset + length > len(data): - raise ValueError("Unexpected EOF, expected string contents") - offset += length - - return 
data[offset - length:offset] - - - def parseList(): - nonlocal offset - - offset += 1 - values = [] - - while offset < len(data): - if data[offset] == ord("e"): - # End of list - offset += 1 - return values - else: - values.append(parse()) - - raise ValueError("Unexpected EOF, expected list contents") - - - def parseDict(): - nonlocal offset - - offset += 1 - items = {} - - while offset < len(data): - if data[offset] == ord("e"): - # End of list - offset += 1 - return items - else: - key, value = parse(), parse() - if not isinstance(key, bytes): - raise ValueError("A dict key must be a byte string") - if key in items: - raise ValueError("Duplicate dict key: {}".format(key)) - items[key] = value - - raise ValueError("Unexpected EOF, expected dict contents") - - - def parse(): - nonlocal offset - - if data[offset] == ord("i"): - return parseInteger() - elif data[offset] == ord("l"): - return parseList() - elif data[offset] == ord("d"): - return parseDict() - elif ord("0") <= data[offset] <= ord("9"): - return parseString() - - raise ValueError("Unknown type specifier: '{}'".format(chr(data[offset]))) - - result = parse() - - if offset != len(data): - raise ValueError("Expected EOF, got {} bytes left".format(len(data) - offset)) - - return result - - -def dumps(data): - result = bytearray() - - - def convert(data): - nonlocal result - - if isinstance(data, str): - raise ValueError("bencode only supports bytes, not str. 
Use encode") - - if isinstance(data, bytes): - result += str(len(data)).encode() + b":" + data - elif isinstance(data, int): - result += b"i" + str(data).encode() + b"e" - elif isinstance(data, list): - result += b"l" - for val in data: - convert(val) - result += b"e" - elif isinstance(data, dict): - result += b"d" - for key in sorted(data.keys()): - if not isinstance(key, bytes): - raise ValueError("Dict key can only be bytes, not {}".format(type(key))) - convert(key) - convert(data[key]) - result += b"e" - else: - raise ValueError("bencode only supports bytes, int, list and dict") - - - convert(data) - - return bytes(result) diff --git a/src/lib/cssvendor/cssvendor.py b/src/lib/cssvendor/cssvendor.py index b04d7cc3..3545f59c 100644 --- a/src/lib/cssvendor/cssvendor.py +++ b/src/lib/cssvendor/cssvendor.py @@ -3,26 +3,26 @@ import re def prefix(content): content = re.sub( - b"@keyframes (.*? {.*?}\s*})", b"@keyframes \\1\n@-webkit-keyframes \\1\n@-moz-keyframes \\1\n", + "@keyframes (.*? 
{.*?[^ ]})", "@keyframes \\1\n@-webkit-keyframes \\1\n@-moz-keyframes \\1\n", content, flags=re.DOTALL ) content = re.sub( - b'([^-\*])(border-radius|box-shadow|appearance|transition|animation|box-sizing|' + - b'backface-visibility|transform|filter|perspective|animation-[a-z-]+): (.*?)([;}])', - b'\\1-webkit-\\2: \\3; -moz-\\2: \\3; -o-\\2: \\3; -ms-\\2: \\3; \\2: \\3 \\4', content + '([^-\*])(border-radius|box-shadow|appearance|transition|animation|box-sizing|' + + 'backface-visibility|transform|filter|perspective|animation-[a-z-]+): (.*?)([;}])', + '\\1-webkit-\\2: \\3; -moz-\\2: \\3; -o-\\2: \\3; -ms-\\2: \\3; \\2: \\3 \\4', content ) content = re.sub( - b'(?<=[^a-zA-Z0-9-])([a-zA-Z0-9-]+): {0,1}(linear-gradient)\((.*?)(\)[;\n])', - b'\\1: -webkit-\\2(\\3);' + - b'\\1: -moz-\\2(\\3);' + - b'\\1: -o-\\2(\\3);' + - b'\\1: -ms-\\2(\\3);' + - b'\\1: \\2(\\3);', content + '(?<=[^a-zA-Z0-9-])([a-zA-Z0-9-]+): {0,1}(linear-gradient)\((.*?)(\)[;\n])', + '\\1: -webkit-\\2(\\3);' + + '\\1: -moz-\\2(\\3);' + + '\\1: -o-\\2(\\3);' + + '\\1: -ms-\\2(\\3);' + + '\\1: \\2(\\3);', content ) return content if __name__ == "__main__": - print(prefix(b""" + print prefix(""" .test { border-radius: 5px; background: linear-gradient(red, blue); @@ -36,4 +36,4 @@ if __name__ == "__main__": } - """).decode("utf8")) + """) diff --git a/src/lib/gevent_ws/__init__.py b/src/lib/gevent_ws/__init__.py deleted file mode 100644 index a157e94c..00000000 --- a/src/lib/gevent_ws/__init__.py +++ /dev/null @@ -1,279 +0,0 @@ -from gevent.pywsgi import WSGIHandler, _InvalidClientInput -from gevent.queue import Queue -import gevent -import hashlib -import base64 -import struct -import socket -import time -import sys - - -SEND_PACKET_SIZE = 1300 -OPCODE_TEXT = 1 -OPCODE_BINARY = 2 -OPCODE_CLOSE = 8 -OPCODE_PING = 9 -OPCODE_PONG = 10 -STATUS_OK = 1000 -STATUS_PROTOCOL_ERROR = 1002 -STATUS_DATA_ERROR = 1007 -STATUS_POLICY_VIOLATION = 1008 -STATUS_TOO_LONG = 1009 - - -class WebSocket: - def __init__(self, 
socket): - self.socket = socket - self.closed = False - self.status = None - self._receive_error = None - self._queue = Queue() - self.max_length = 10 * 1024 * 1024 - gevent.spawn(self._listen) - - - def set_max_message_length(self, length): - self.max_length = length - - - def _listen(self): - try: - while True: - fin = False - message = bytearray() - is_first_message = True - start_opcode = None - while not fin: - payload, opcode, fin = self._get_frame(max_length=self.max_length - len(message)) - # Make sure continuation frames have correct information - if not is_first_message and opcode != 0: - self._error(STATUS_PROTOCOL_ERROR) - if is_first_message: - if opcode not in (OPCODE_TEXT, OPCODE_BINARY): - self._error(STATUS_PROTOCOL_ERROR) - # Save opcode - start_opcode = opcode - message += payload - is_first_message = False - message = bytes(message) - if start_opcode == OPCODE_TEXT: # UTF-8 text - try: - message = message.decode() - except UnicodeDecodeError: - self._error(STATUS_DATA_ERROR) - self._queue.put(message) - except Exception as e: - self.closed = True - self._receive_error = e - self._queue.put(None) # To make sure the error is read - - - def receive(self): - if not self._queue.empty(): - return self.receive_nowait() - if isinstance(self._receive_error, EOFError): - return None - if self._receive_error: - raise self._receive_error - self._queue.peek() - return self.receive_nowait() - - - def receive_nowait(self): - ret = self._queue.get_nowait() - if self._receive_error and not isinstance(self._receive_error, EOFError): - raise self._receive_error - return ret - - - def send(self, data): - if self.closed: - raise EOFError() - if isinstance(data, str): - self._send_frame(OPCODE_TEXT, data.encode()) - elif isinstance(data, bytes): - self._send_frame(OPCODE_BINARY, data) - else: - raise TypeError("Expected str or bytes, got " + repr(type(data))) - - - # Reads a frame from the socket. 
Pings, pongs and close packets are handled - # automatically - def _get_frame(self, max_length): - while True: - payload, opcode, fin = self._read_frame(max_length=max_length) - if opcode == OPCODE_PING: - self._send_frame(OPCODE_PONG, payload) - elif opcode == OPCODE_PONG: - pass - elif opcode == OPCODE_CLOSE: - if len(payload) >= 2: - self.status = struct.unpack("!H", payload[:2])[0] - was_closed = self.closed - self.closed = True - if not was_closed: - # Send a close frame in response - self.close(STATUS_OK) - raise EOFError() - else: - return payload, opcode, fin - - - # Low-level function, use _get_frame instead - def _read_frame(self, max_length): - header = self._recv_exactly(2) - - if not (header[1] & 0x80): - self._error(STATUS_POLICY_VIOLATION) - - opcode = header[0] & 0xf - fin = bool(header[0] & 0x80) - - payload_length = header[1] & 0x7f - if payload_length == 126: - payload_length = struct.unpack("!H", self._recv_exactly(2))[0] - elif payload_length == 127: - payload_length = struct.unpack("!Q", self._recv_exactly(8))[0] - - # Control frames are handled in a special way - if opcode in (OPCODE_PING, OPCODE_PONG): - max_length = 125 - - if payload_length > max_length: - self._error(STATUS_TOO_LONG) - - mask = self._recv_exactly(4) - payload = self._recv_exactly(payload_length) - payload = self._unmask(payload, mask) - - return payload, opcode, fin - - - def _recv_exactly(self, length): - buf = bytearray() - while len(buf) < length: - block = self.socket.recv(min(4096, length - len(buf))) - if block == b"": - raise EOFError() - buf += block - return bytes(buf) - - - def _unmask(self, payload, mask): - def gen(c): - return bytes([x ^ c for x in range(256)]) - - - payload = bytearray(payload) - payload[0::4] = payload[0::4].translate(gen(mask[0])) - payload[1::4] = payload[1::4].translate(gen(mask[1])) - payload[2::4] = payload[2::4].translate(gen(mask[2])) - payload[3::4] = payload[3::4].translate(gen(mask[3])) - return bytes(payload) - - - def 
_send_frame(self, opcode, data): - for i in range(0, len(data), SEND_PACKET_SIZE): - part = data[i:i + SEND_PACKET_SIZE] - fin = int(i == (len(data) - 1) // SEND_PACKET_SIZE * SEND_PACKET_SIZE) - header = bytes( - [ - (opcode if i == 0 else 0) | (fin << 7), - min(len(part), 126) - ] - ) - if len(part) >= 126: - header += struct.pack("!H", len(part)) - self.socket.sendall(header + part) - - - def _error(self, status): - self.close(status) - raise EOFError() - - - def close(self, status=STATUS_OK): - self.closed = True - try: - self._send_frame(OPCODE_CLOSE, struct.pack("!H", status)) - except (BrokenPipeError, ConnectionResetError): - pass - self.socket.close() - - -class WebSocketHandler(WSGIHandler): - def handle_one_response(self): - self.time_start = time.time() - self.status = None - self.headers_sent = False - - self.result = None - self.response_use_chunked = False - self.response_length = 0 - - - http_connection = [s.strip().lower() for s in self.environ.get("HTTP_CONNECTION", "").split(",")] - if "upgrade" not in http_connection or self.environ.get("HTTP_UPGRADE", "").lower() != "websocket": - # Not my problem - return super(WebSocketHandler, self).handle_one_response() - - if "HTTP_SEC_WEBSOCKET_KEY" not in self.environ: - self.start_response("400 Bad Request", []) - return - - # Generate Sec-Websocket-Accept header - accept = self.environ["HTTP_SEC_WEBSOCKET_KEY"].encode() - accept += b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11" - accept = base64.b64encode(hashlib.sha1(accept).digest()).decode() - - # Accept - self.start_response("101 Switching Protocols", [ - ("Upgrade", "websocket"), - ("Connection", "Upgrade"), - ("Sec-Websocket-Accept", accept) - ])(b"") - - self.environ["wsgi.websocket"] = WebSocket(self.socket) - - # Can't call super because it sets invalid flags like "status" - try: - try: - self.run_application() - finally: - try: - self.wsgi_input._discard() - except (socket.error, IOError): - pass - except _InvalidClientInput: - 
self._send_error_response_if_possible(400) - except socket.error as ex: - if ex.args[0] in self.ignored_socket_errors: - self.close_connection = True - else: - self.handle_error(*sys.exc_info()) - except: # pylint:disable=bare-except - self.handle_error(*sys.exc_info()) - finally: - self.time_finish = time.time() - self.log_request() - self.close_connection = True - - - def process_result(self): - if "wsgi.websocket" in self.environ: - if self.result is None: - return - # Flushing result is required for werkzeug compatibility - for elem in self.result: - pass - else: - super(WebSocketHandler, self).process_result() - - - @property - def version(self): - if not self.environ: - return None - - return self.environ.get('HTTP_SEC_WEBSOCKET_VERSION') diff --git a/src/lib/geventwebsocket/__init__.py b/src/lib/geventwebsocket/__init__.py new file mode 100644 index 00000000..7e2e0167 --- /dev/null +++ b/src/lib/geventwebsocket/__init__.py @@ -0,0 +1,21 @@ +VERSION = (0, 9, 3, 'final', 0) + +__all__ = [ + 'WebSocketApplication', + 'Resource', + 'WebSocketServer', + 'WebSocketError', + 'get_version' +] + + +def get_version(*args, **kwargs): + from .utils import get_version + return get_version(*args, **kwargs) + +try: + from .resource import WebSocketApplication, Resource + from .server import WebSocketServer + from .exceptions import WebSocketError +except ImportError: + pass diff --git a/src/lib/geventwebsocket/exceptions.py b/src/lib/geventwebsocket/exceptions.py new file mode 100644 index 00000000..e066727e --- /dev/null +++ b/src/lib/geventwebsocket/exceptions.py @@ -0,0 +1,19 @@ +from socket import error as socket_error + + +class WebSocketError(socket_error): + """ + Base class for all websocket errors. + """ + + +class ProtocolError(WebSocketError): + """ + Raised if an error occurs when de/encoding the websocket protocol. + """ + + +class FrameTooLargeException(ProtocolError): + """ + Raised if a frame is received that is too large. 
+ """ diff --git a/src/lib/geventwebsocket/gunicorn/__init__.py b/src/lib/geventwebsocket/gunicorn/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/lib/geventwebsocket/gunicorn/workers.py b/src/lib/geventwebsocket/gunicorn/workers.py new file mode 100644 index 00000000..d0aa1369 --- /dev/null +++ b/src/lib/geventwebsocket/gunicorn/workers.py @@ -0,0 +1,6 @@ +from geventwebsocket.handler import WebSocketHandler +from gunicorn.workers.ggevent import GeventPyWSGIWorker + + +class GeventWebSocketWorker(GeventPyWSGIWorker): + wsgi_handler = WebSocketHandler diff --git a/src/lib/geventwebsocket/handler.py b/src/lib/geventwebsocket/handler.py new file mode 100644 index 00000000..be681cee --- /dev/null +++ b/src/lib/geventwebsocket/handler.py @@ -0,0 +1,283 @@ +# Modified: Werkzeug Debugger workaround in run_websocket(self): + +import base64 +import hashlib +import warnings + +from gevent.pywsgi import WSGIHandler +from .websocket import WebSocket, Stream +from .logging import create_logger + + +class Client(object): + def __init__(self, address, ws): + self.address = address + self.ws = ws + + +class WebSocketHandler(WSGIHandler): + """ + Automatically upgrades the connection to a websocket. + + To prevent the WebSocketHandler to call the underlying WSGI application, + but only setup the WebSocket negotiations, do: + + mywebsockethandler.prevent_wsgi_call = True + + before calling run_application(). This is useful if you want to do more + things before calling the app, and want to off-load the WebSocket + negotiations to this library. Socket.IO needs this for example, to send + the 'ack' before yielding the control to your WSGI app. + """ + + SUPPORTED_VERSIONS = ('13', '8', '7') + GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11" + + def run_websocket(self): + """ + Called when a websocket has been created successfully. 
+ """ + + if getattr(self, 'prevent_wsgi_call', False): + return + + # In case WebSocketServer is not used + if not hasattr(self.server, 'clients'): + self.server.clients = {} + + # Since we're now a websocket connection, we don't care what the + # application actually responds with for the http response + + try: + self.server.clients[self.client_address] = Client( + self.client_address, self.websocket) + if self.application.__class__.__name__ == "DebuggedApplication": # Modified: Werkzeug Debugger workaround (https://bitbucket.org/Jeffrey/gevent-websocket/issue/53/if-the-application-returns-a-generator-we) + list(self.application(self.environ, lambda s, h: [])) + else: + self.application(self.environ, lambda s, h: []) + finally: + del self.server.clients[self.client_address] + if not self.websocket.closed: + self.websocket.close() + self.environ.update({ + 'wsgi.websocket': None + }) + self.websocket = None + + def run_application(self): + if (hasattr(self.server, 'pre_start_hook') + and self.server.pre_start_hook): + self.logger.debug("Calling pre-start hook") + if self.server.pre_start_hook(self): + return super(WebSocketHandler, self).run_application() + + self.logger.debug("Initializing WebSocket") + self.result = self.upgrade_websocket() + + if hasattr(self, 'websocket'): + if self.status and not self.headers_sent: + self.write('') + + self.run_websocket() + else: + if self.status: + # A status was set, likely an error so just send the response + if not self.result: + self.result = [] + + self.process_result() + return + + # This handler did not handle the request, so defer it to the + # underlying application object + return super(WebSocketHandler, self).run_application() + + def upgrade_websocket(self): + """ + Attempt to upgrade the current environ into a websocket enabled + connection. If successful, the environ dict with be updated with two + new entries, `wsgi.websocket` and `wsgi.websocket_version`. + + :returns: Whether the upgrade was successful. 
+ """ + + # Some basic sanity checks first + + self.logger.debug("Validating WebSocket request") + + if self.environ.get('REQUEST_METHOD', '') != 'GET': + # This is not a websocket request, so we must not handle it + self.logger.debug('Can only upgrade connection if using GET method.') + return + + upgrade = self.environ.get('HTTP_UPGRADE', '').lower() + + if upgrade == 'websocket': + connection = self.environ.get('HTTP_CONNECTION', '').lower() + + if 'upgrade' not in connection: + # This is not a websocket request, so we must not handle it + self.logger.warning("Client didn't ask for a connection " + "upgrade") + return + else: + # This is not a websocket request, so we must not handle it + return + + if self.request_version != 'HTTP/1.1': + self.start_response('402 Bad Request', []) + self.logger.warning("Bad server protocol in headers") + + return ['Bad protocol version'] + + if self.environ.get('HTTP_SEC_WEBSOCKET_VERSION'): + return self.upgrade_connection() + else: + self.logger.warning("No protocol defined") + self.start_response('426 Upgrade Required', [ + ('Sec-WebSocket-Version', ', '.join(self.SUPPORTED_VERSIONS))]) + + return ['No Websocket protocol version defined'] + + def upgrade_connection(self): + """ + Validate and 'upgrade' the HTTP request to a WebSocket request. + + If an upgrade succeeded then then handler will have `start_response` + with a status of `101`, the environ will also be updated with + `wsgi.websocket` and `wsgi.websocket_version` keys. + + :param environ: The WSGI environ dict. + :param start_response: The callable used to start the response. + :param stream: File like object that will be read from/written to by + the underlying WebSocket object, if created. + :return: The WSGI response iterator is something went awry. 
+ """ + + self.logger.debug("Attempting to upgrade connection") + + version = self.environ.get("HTTP_SEC_WEBSOCKET_VERSION") + + if version not in self.SUPPORTED_VERSIONS: + msg = "Unsupported WebSocket Version: {0}".format(version) + + self.logger.warning(msg) + self.start_response('400 Bad Request', [ + ('Sec-WebSocket-Version', ', '.join(self.SUPPORTED_VERSIONS)) + ]) + + return [msg] + + key = self.environ.get("HTTP_SEC_WEBSOCKET_KEY", '').strip() + + if not key: + # 5.2.1 (3) + msg = "Sec-WebSocket-Key header is missing/empty" + + self.logger.warning(msg) + self.start_response('400 Bad Request', []) + + return [msg] + + try: + key_len = len(base64.b64decode(key)) + except TypeError: + msg = "Invalid key: {0}".format(key) + + self.logger.warning(msg) + self.start_response('400 Bad Request', []) + + return [msg] + + if key_len != 16: + # 5.2.1 (3) + msg = "Invalid key: {0}".format(key) + + self.logger.warning(msg) + self.start_response('400 Bad Request', []) + + return [msg] + + # Check for WebSocket Protocols + requested_protocols = self.environ.get( + 'HTTP_SEC_WEBSOCKET_PROTOCOL', '') + protocol = None + + if hasattr(self.application, 'app_protocol'): + allowed_protocol = self.application.app_protocol( + self.environ['PATH_INFO']) + + if allowed_protocol and allowed_protocol in requested_protocols: + protocol = allowed_protocol + self.logger.debug("Protocol allowed: {0}".format(protocol)) + + self.websocket = WebSocket(self.environ, Stream(self), self) + self.environ.update({ + 'wsgi.websocket_version': version, + 'wsgi.websocket': self.websocket + }) + + headers = [ + ("Upgrade", "websocket"), + ("Connection", "Upgrade"), + ("Sec-WebSocket-Accept", base64.b64encode( + hashlib.sha1(key + self.GUID).digest())), + ] + + if protocol: + headers.append(("Sec-WebSocket-Protocol", protocol)) + + self.logger.debug("WebSocket request accepted, switching protocols") + self.start_response("101 Switching Protocols", headers) + + @property + def logger(self): + if not 
hasattr(self.server, 'logger'): + self.server.logger = create_logger(__name__) + + return self.server.logger + + def log_request(self): + if '101' not in self.status: + self.logger.info(self.format_request()) + + @property + def active_client(self): + return self.server.clients[self.client_address] + + def start_response(self, status, headers, exc_info=None): + """ + Called when the handler is ready to send a response back to the remote + endpoint. A websocket connection may have not been created. + """ + writer = super(WebSocketHandler, self).start_response( + status, headers, exc_info=exc_info) + + self._prepare_response() + + return writer + + def _prepare_response(self): + """ + Sets up the ``pywsgi.Handler`` to work with a websocket response. + + This is used by other projects that need to support WebSocket + connections as part of a larger effort. + """ + assert not self.headers_sent + + if not self.environ.get('wsgi.websocket'): + # a WebSocket connection is not established, do nothing + return + + # So that `finalize_headers` doesn't write a Content-Length header + self.provided_content_length = False + + # The websocket is now controlling the response + self.response_use_chunked = False + + # Once the request is over, the connection must be closed + self.close_connection = True + + # Prevents the Date header from being written + self.provided_date = True diff --git a/src/lib/geventwebsocket/logging.py b/src/lib/geventwebsocket/logging.py new file mode 100644 index 00000000..554ca02d --- /dev/null +++ b/src/lib/geventwebsocket/logging.py @@ -0,0 +1,31 @@ +from __future__ import absolute_import + +from logging import getLogger, StreamHandler, getLoggerClass, Formatter, DEBUG + + +def create_logger(name, debug=False, format=None): + Logger = getLoggerClass() + + class DebugLogger(Logger): + def getEffectiveLevel(x): + if x.level == 0 and debug: + return DEBUG + else: + return Logger.getEffectiveLevel(x) + + class DebugHandler(StreamHandler): + def emit(x, 
record): + StreamHandler.emit(x, record) if debug else None + + handler = DebugHandler() + handler.setLevel(DEBUG) + + if format: + handler.setFormatter(Formatter(format)) + + logger = getLogger(name) + del logger.handlers[:] + logger.__class__ = DebugLogger + logger.addHandler(handler) + + return logger diff --git a/src/lib/geventwebsocket/protocols/__init__.py b/src/lib/geventwebsocket/protocols/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/lib/geventwebsocket/protocols/base.py b/src/lib/geventwebsocket/protocols/base.py new file mode 100644 index 00000000..1c05ab62 --- /dev/null +++ b/src/lib/geventwebsocket/protocols/base.py @@ -0,0 +1,35 @@ +class BaseProtocol(object): + PROTOCOL_NAME = '' + + def __init__(self, app): + self._app = app + + def on_open(self): + self.app.on_open() + + def on_message(self, message): + self.app.on_message(message) + + def on_close(self, reason=None): + self.app.on_close(reason) + + @property + def app(self): + if self._app: + return self._app + else: + raise Exception("No application coupled") + + @property + def server(self): + if not hasattr(self.app, 'ws'): + return None + + return self.app.ws.handler.server + + @property + def handler(self): + if not hasattr(self.app, 'ws'): + return None + + return self.app.ws.handler diff --git a/src/lib/geventwebsocket/protocols/wamp.py b/src/lib/geventwebsocket/protocols/wamp.py new file mode 100644 index 00000000..b5586537 --- /dev/null +++ b/src/lib/geventwebsocket/protocols/wamp.py @@ -0,0 +1,234 @@ +import inspect +import random +import string +import types + +try: + import ujson as json +except ImportError: + try: + import simplejson as json + except ImportError: + import json + +from ..exceptions import WebSocketError +from .base import BaseProtocol + + +def export_rpc(arg=None): + if isinstance(arg, types.FunctionType): + arg._rpc = arg.__name__ + return arg + + +def serialize(data): + return json.dumps(data) + + +class Prefixes(object): + def 
__init__(self): + self.prefixes = {} + + def add(self, prefix, uri): + self.prefixes[prefix] = uri + + def resolve(self, curie_or_uri): + if "http://" in curie_or_uri: + return curie_or_uri + elif ':' in curie_or_uri: + prefix, proc = curie_or_uri.split(':', 1) + return self.prefixes[prefix] + proc + else: + raise Exception(curie_or_uri) + + +class RemoteProcedures(object): + def __init__(self): + self.calls = {} + + def register_procedure(self, uri, proc): + self.calls[uri] = proc + + def register_object(self, uri, obj): + for k in inspect.getmembers(obj, inspect.ismethod): + if '_rpc' in k[1].__dict__: + proc_uri = uri + k[1]._rpc + self.calls[proc_uri] = (obj, k[1]) + + def call(self, uri, args): + if uri in self.calls: + proc = self.calls[uri] + + # Do the correct call whether it's a function or instance method. + if isinstance(proc, tuple): + if proc[1].__self__ is None: + # Create instance of object and call method + return proc[1](proc[0](), *args) + else: + # Call bound method on instance + return proc[1](*args) + else: + return self.calls[uri](*args) + else: + raise Exception("no such uri '{}'".format(uri)) + + +class Channels(object): + def __init__(self): + self.channels = {} + + def create(self, uri, prefix_matching=False): + if uri not in self.channels: + self.channels[uri] = [] + + # TODO: implement prefix matching + + def subscribe(self, uri, client): + if uri in self.channels: + self.channels[uri].append(client) + + def unsubscribe(self, uri, client): + if uri not in self.channels: + return + + client_index = self.channels[uri].index(client) + self.channels[uri].pop(client_index) + + if len(self.channels[uri]) == 0: + del self.channels[uri] + + def publish(self, uri, event, exclude=None, eligible=None): + if uri not in self.channels: + return + + # TODO: exclude & eligible + + msg = [WampProtocol.MSG_EVENT, uri, event] + + for client in self.channels[uri]: + try: + client.ws.send(serialize(msg)) + except WebSocketError: + # Seems someone didn't 
unsubscribe before disconnecting + self.channels[uri].remove(client) + + +class WampProtocol(BaseProtocol): + MSG_WELCOME = 0 + MSG_PREFIX = 1 + MSG_CALL = 2 + MSG_CALL_RESULT = 3 + MSG_CALL_ERROR = 4 + MSG_SUBSCRIBE = 5 + MSG_UNSUBSCRIBE = 6 + MSG_PUBLISH = 7 + MSG_EVENT = 8 + + PROTOCOL_NAME = "wamp" + + def __init__(self, *args, **kwargs): + self.procedures = RemoteProcedures() + self.prefixes = Prefixes() + self.session_id = ''.join( + [random.choice(string.digits + string.letters) + for i in xrange(16)]) + + super(WampProtocol, self).__init__(*args, **kwargs) + + def register_procedure(self, *args, **kwargs): + self.procedures.register_procedure(*args, **kwargs) + + def register_object(self, *args, **kwargs): + self.procedures.register_object(*args, **kwargs) + + def register_pubsub(self, *args, **kwargs): + if not hasattr(self.server, 'channels'): + self.server.channels = Channels() + + self.server.channels.create(*args, **kwargs) + + def do_handshake(self): + from geventwebsocket import get_version + + welcome = [ + self.MSG_WELCOME, + self.session_id, + 1, + 'gevent-websocket/' + get_version() + ] + self.app.ws.send(serialize(welcome)) + + def _get_exception_info(self, e): + uri = 'http://TODO#generic' + desc = str(type(e)) + details = str(e) + return [uri, desc, details] + + def rpc_call(self, data): + call_id, curie_or_uri = data[1:3] + args = data[3:] + + if not isinstance(call_id, (str, unicode)): + raise Exception() + if not isinstance(curie_or_uri, (str, unicode)): + raise Exception() + + uri = self.prefixes.resolve(curie_or_uri) + + try: + result = self.procedures.call(uri, args) + result_msg = [self.MSG_CALL_RESULT, call_id, result] + except Exception, e: + result_msg = [self.MSG_CALL_ERROR, + call_id] + self._get_exception_info(e) + + self.app.on_message(serialize(result_msg)) + + def pubsub_action(self, data): + action = data[0] + curie_or_uri = data[1] + + if not isinstance(action, int): + raise Exception() + if not isinstance(curie_or_uri, (str, 
unicode)): + raise Exception() + + uri = self.prefixes.resolve(curie_or_uri) + + if action == self.MSG_SUBSCRIBE and len(data) == 2: + self.server.channels.subscribe(data[1], self.handler.active_client) + + elif action == self.MSG_UNSUBSCRIBE and len(data) == 2: + self.server.channels.unsubscribe( + data[1], self.handler.active_client) + + elif action == self.MSG_PUBLISH and len(data) >= 3: + payload = data[2] if len(data) >= 3 else None + exclude = data[3] if len(data) >= 4 else None + eligible = data[4] if len(data) >= 5 else None + + self.server.channels.publish(uri, payload, exclude, eligible) + + def on_open(self): + self.app.on_open() + self.do_handshake() + + def on_message(self, message): + data = json.loads(message) + + if not isinstance(data, list): + raise Exception('incoming data is no list') + + if data[0] == self.MSG_PREFIX and len(data) == 3: + prefix, uri = data[1:3] + self.prefixes.add(prefix, uri) + + elif data[0] == self.MSG_CALL and len(data) >= 3: + return self.rpc_call(data) + + elif data[0] in (self.MSG_SUBSCRIBE, self.MSG_UNSUBSCRIBE, + self.MSG_PUBLISH): + return self.pubsub_action(data) + else: + raise Exception("Unknown call") + diff --git a/src/lib/geventwebsocket/resource.py b/src/lib/geventwebsocket/resource.py new file mode 100644 index 00000000..36c1fb36 --- /dev/null +++ b/src/lib/geventwebsocket/resource.py @@ -0,0 +1,74 @@ +import re + +from .protocols.base import BaseProtocol +from .exceptions import WebSocketError + + +class WebSocketApplication(object): + protocol_class = BaseProtocol + + def __init__(self, ws): + self.protocol = self.protocol_class(self) + self.ws = ws + + def handle(self): + self.protocol.on_open() + + while True: + try: + message = self.ws.receive() + except WebSocketError: + self.protocol.on_close() + break + + self.protocol.on_message(message) + + def on_open(self, *args, **kwargs): + pass + + def on_close(self, *args, **kwargs): + pass + + def on_message(self, message, *args, **kwargs): + 
self.ws.send(message, **kwargs) + + @classmethod + def protocol_name(cls): + return cls.protocol_class.PROTOCOL_NAME + + +class Resource(object): + def __init__(self, apps=None): + self.apps = apps if apps else [] + + def _app_by_path(self, environ_path): + # Which app matched the current path? + + for path, app in self.apps.iteritems(): + if re.match(path, environ_path): + return app + + def app_protocol(self, path): + app = self._app_by_path(path) + + if hasattr(app, 'protocol_name'): + return app.protocol_name() + else: + return '' + + def __call__(self, environ, start_response): + environ = environ + current_app = self._app_by_path(environ['PATH_INFO']) + + if current_app is None: + raise Exception("No apps defined") + + if 'wsgi.websocket' in environ: + ws = environ['wsgi.websocket'] + current_app = current_app(ws) + current_app.ws = ws # TODO: needed? + current_app.handle() + + return None + else: + return current_app(environ, start_response) diff --git a/src/lib/geventwebsocket/server.py b/src/lib/geventwebsocket/server.py new file mode 100644 index 00000000..00443b8a --- /dev/null +++ b/src/lib/geventwebsocket/server.py @@ -0,0 +1,34 @@ +from gevent.pywsgi import WSGIServer + +from .handler import WebSocketHandler +from .logging import create_logger + + +class WebSocketServer(WSGIServer): + debug_log_format = ( + '-' * 80 + '\n' + + '%(levelname)s in %(module)s [%(pathname)s:%(lineno)d]:\n' + + '%(message)s\n' + + '-' * 80 + ) + + def __init__(self, *args, **kwargs): + self.debug = kwargs.pop('debug', False) + self.pre_start_hook = kwargs.pop('pre_start_hook', None) + self._logger = None + self.clients = {} + + kwargs['handler_class'] = WebSocketHandler + super(WebSocketServer, self).__init__(*args, **kwargs) + + def handle(self, socket, address): + handler = self.handler_class(socket, address, self) + handler.handle() + + @property + def logger(self): + if not self._logger: + self._logger = create_logger( + __name__, self.debug, self.debug_log_format) + + 
return self._logger diff --git a/src/lib/geventwebsocket/utf8validator.py b/src/lib/geventwebsocket/utf8validator.py new file mode 100644 index 00000000..b8a3e8a5 --- /dev/null +++ b/src/lib/geventwebsocket/utf8validator.py @@ -0,0 +1,128 @@ +############################################################################### +## +## Copyright 2011-2013 Tavendo GmbH +## +## Note: +## +## This code is a Python implementation of the algorithm +## +## "Flexible and Economical UTF-8 Decoder" +## +## by Bjoern Hoehrmann +## +## bjoern@hoehrmann.de +## http://bjoern.hoehrmann.de/utf-8/decoder/dfa/ +## +## Licensed under the Apache License, Version 2.0 (the "License"); +## you may not use this file except in compliance with the License. +## You may obtain a copy of the License at +## +## http://www.apache.org/licenses/LICENSE-2.0 +## +## Unless required by applicable law or agreed to in writing, software +## distributed under the License is distributed on an "AS IS" BASIS, +## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +## See the License for the specific language governing permissions and +## limitations under the License. +## +############################################################################### + + +## use Cython implementation of UTF8 validator if available +## +try: + from wsaccel.utf8validator import Utf8Validator +except: + ## fallback to pure Python implementation + + class Utf8Validator: + """ + Incremental UTF-8 validator with constant memory consumption (minimal + state). + + Implements the algorithm "Flexible and Economical UTF-8 Decoder" by + Bjoern Hoehrmann (http://bjoern.hoehrmann.de/utf-8/decoder/dfa/). 
+ """ + + ## DFA transitions + UTF8VALIDATOR_DFA = [ + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 00..1f + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 20..3f + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 40..5f + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 60..7f + 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9, # 80..9f + 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, # a0..bf + 8,8,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2, # c0..df + 0xa,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x4,0x3,0x3, # e0..ef + 0xb,0x6,0x6,0x6,0x5,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8, # f0..ff + 0x0,0x1,0x2,0x3,0x5,0x8,0x7,0x1,0x1,0x1,0x4,0x6,0x1,0x1,0x1,0x1, # s0..s0 + 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,0,1,0,1,1,1,1,1,1, # s1..s2 + 1,2,1,1,1,1,1,2,1,2,1,1,1,1,1,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1, # s3..s4 + 1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,1,1,1,3,1,3,1,1,1,1,1,1, # s5..s6 + 1,3,1,1,1,1,1,3,1,3,1,1,1,1,1,1,1,3,1,1,1,1,1,1,1,1,1,1,1,1,1,1, # s7..s8 + ] + + UTF8_ACCEPT = 0 + UTF8_REJECT = 1 + + def __init__(self): + self.reset() + + def decode(self, b): + """ + Eat one UTF-8 octet, and validate on the fly. + + Returns UTF8_ACCEPT when enough octets have been consumed, in which case + self.codepoint contains the decoded Unicode code point. + + Returns UTF8_REJECT when invalid UTF-8 was encountered. + + Returns some other positive integer when more octets need to be eaten. + """ + type = Utf8Validator.UTF8VALIDATOR_DFA[b] + + if self.state != Utf8Validator.UTF8_ACCEPT: + self.codepoint = (b & 0x3f) | (self.codepoint << 6) + else: + self.codepoint = (0xff >> type) & b + + self.state = Utf8Validator.UTF8VALIDATOR_DFA[256 + self.state * 16 + type] + + return self.state + + def reset(self): + """ + Reset validator to start new incremental UTF-8 decode/validation. 
+ """ + self.state = Utf8Validator.UTF8_ACCEPT + self.codepoint = 0 + self.i = 0 + + def validate(self, ba): + """ + Incrementally validate a chunk of bytes provided as string. + + Will return a quad (valid?, endsOnCodePoint?, currentIndex, totalIndex). + + As soon as an octet is encountered which renders the octet sequence + invalid, a quad with valid? == False is returned. currentIndex returns + the index within the currently consumed chunk, and totalIndex the + index within the total consumed sequence that was the point of bail out. + When valid? == True, currentIndex will be len(ba) and totalIndex the + total amount of consumed bytes. + """ + + l = len(ba) + + for i in xrange(l): + ## optimized version of decode(), since we are not interested in actual code points + + self.state = Utf8Validator.UTF8VALIDATOR_DFA[256 + (self.state << 4) + Utf8Validator.UTF8VALIDATOR_DFA[ord(ba[i])]] + + if self.state == Utf8Validator.UTF8_REJECT: + self.i += i + return False, False, i, self.i + + self.i += l + + return True, self.state == Utf8Validator.UTF8_ACCEPT, l, self.i diff --git a/src/lib/geventwebsocket/utils.py b/src/lib/geventwebsocket/utils.py new file mode 100644 index 00000000..2e5bc3b7 --- /dev/null +++ b/src/lib/geventwebsocket/utils.py @@ -0,0 +1,45 @@ +import subprocess + + +def get_version(version=None): + "Returns a PEP 386-compliant version number from VERSION." 
+ + if version is None: + from geventwebsocket import VERSION as version + else: + assert len(version) == 5 + assert version[3] in ('alpha', 'beta', 'rc', 'final') + + # Now build the two parts of the version number: + # main = X.Y[.Z] + # sub = .devN - for pre-alpha releases + # | {a|b|c}N - for alpha, beta and rc releases + + parts = 2 if version[2] == 0 else 3 + main = '.'.join(str(x) for x in version[:parts]) + + sub = '' + if version[3] == 'alpha' and version[4] == 0: + hg_changeset = get_hg_changeset() + if hg_changeset: + sub = '.dev{0}'.format(hg_changeset) + + elif version[3] != 'final': + mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'} + sub = mapping[version[3]] + str(version[4]) + + return str(main + sub) + + +def get_hg_changeset(): + rev, err = subprocess.Popen( + 'hg id -i', + shell=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE + ).communicate() + + if err: + return None + else: + return rev.strip().replace('+', '') diff --git a/src/lib/geventwebsocket/websocket.py b/src/lib/geventwebsocket/websocket.py new file mode 100644 index 00000000..6d4f76d3 --- /dev/null +++ b/src/lib/geventwebsocket/websocket.py @@ -0,0 +1,543 @@ +import struct + +from socket import error + +from .exceptions import ProtocolError +from .exceptions import WebSocketError +from .exceptions import FrameTooLargeException + +from .utf8validator import Utf8Validator + + +MSG_SOCKET_DEAD = "Socket is dead" +MSG_ALREADY_CLOSED = "Connection is already closed" +MSG_CLOSED = "Connection closed" + + +class WebSocket(object): + """ + Base class for supporting websocket operations. + + :ivar environ: The http environment referenced by this connection. + :ivar closed: Whether this connection is closed/closing. + :ivar stream: The underlying file like object that will be read from / + written to by this WebSocket object. 
+ """ + + __slots__ = ('utf8validator', 'utf8validate_last', 'environ', 'closed', + 'stream', 'raw_write', 'raw_read', 'handler') + + OPCODE_CONTINUATION = 0x00 + OPCODE_TEXT = 0x01 + OPCODE_BINARY = 0x02 + OPCODE_CLOSE = 0x08 + OPCODE_PING = 0x09 + OPCODE_PONG = 0x0a + + def __init__(self, environ, stream, handler): + self.environ = environ + self.closed = False + + self.stream = stream + + self.raw_write = stream.write + self.raw_read = stream.read + + self.utf8validator = Utf8Validator() + self.handler = handler + + def __del__(self): + try: + self.close() + except: + # close() may fail if __init__ didn't complete + pass + + def _decode_bytes(self, bytestring): + """ + Internal method used to convert the utf-8 encoded bytestring into + unicode. + + If the conversion fails, the socket will be closed. + """ + + if not bytestring: + return u'' + + try: + return bytestring.decode('utf-8') + except UnicodeDecodeError: + self.close(1007) + + raise + + def _encode_bytes(self, text): + """ + :returns: The utf-8 byte string equivalent of `text`. + """ + + if isinstance(text, str): + return text + + if not isinstance(text, unicode): + text = unicode(text or '') + + return text.encode('utf-8') + + def _is_valid_close_code(self, code): + """ + :returns: Whether the returned close code is a valid hybi return code. + """ + if code < 1000: + return False + + if 1004 <= code <= 1006: + return False + + if 1012 <= code <= 1016: + return False + + if code == 1100: + # not sure about this one but the autobahn fuzzer requires it. 
+ return False + + if 2000 <= code <= 2999: + return False + + return True + + @property + def current_app(self): + if hasattr(self.handler.server.application, 'current_app'): + return self.handler.server.application.current_app + else: + # For backwards compatibility reasons + class MockApp(): + def on_close(self, *args): + pass + + return MockApp() + + @property + def origin(self): + if not self.environ: + return + + return self.environ.get('HTTP_ORIGIN') + + @property + def protocol(self): + if not self.environ: + return + + return self.environ.get('HTTP_SEC_WEBSOCKET_PROTOCOL') + + @property + def version(self): + if not self.environ: + return + + return self.environ.get('HTTP_SEC_WEBSOCKET_VERSION') + + @property + def path(self): + if not self.environ: + return + + return self.environ.get('PATH_INFO') + + @property + def logger(self): + return self.handler.logger + + def handle_close(self, header, payload): + """ + Called when a close frame has been decoded from the stream. + + :param header: The decoded `Header`. + :param payload: The bytestring payload associated with the close frame. + """ + if not payload: + self.close(1000, None) + + return + + if len(payload) < 2: + raise ProtocolError('Invalid close frame: {0} {1}'.format( + header, payload)) + + code = struct.unpack('!H', str(payload[:2]))[0] + payload = payload[2:] + + if payload: + validator = Utf8Validator() + val = validator.validate(payload) + + if not val[0]: + raise UnicodeError + + if not self._is_valid_close_code(code): + raise ProtocolError('Invalid close code {0}'.format(code)) + + self.close(code, payload) + + def handle_ping(self, header, payload): + self.send_frame(payload, self.OPCODE_PONG) + + def handle_pong(self, header, payload): + pass + + def read_frame(self): + """ + Block until a full frame has been read from the socket. + + This is an internal method as calling this will not cleanup correctly + if an exception is called. Use `receive` instead. 
+ + :return: The header and payload as a tuple. + """ + + header = Header.decode_header(self.stream) + + if header.flags: + raise ProtocolError + + if not header.length: + return header, '' + + try: + payload = self.raw_read(header.length) + except error: + payload = '' + except Exception: + # TODO log out this exception + payload = '' + + if len(payload) != header.length: + raise WebSocketError('Unexpected EOF reading frame payload') + + if header.mask: + payload = header.unmask_payload(payload) + + return header, payload + + def validate_utf8(self, payload): + # Make sure the frames are decodable independently + self.utf8validate_last = self.utf8validator.validate(payload) + + if not self.utf8validate_last[0]: + raise UnicodeError("Encountered invalid UTF-8 while processing " + "text message at payload octet index " + "{0:d}".format(self.utf8validate_last[3])) + + def read_message(self): + """ + Return the next text or binary message from the socket. + + This is an internal method as calling this will not cleanup correctly + if an exception is called. Use `receive` instead. 
+ """ + opcode = None + message = "" + + while True: + header, payload = self.read_frame() + f_opcode = header.opcode + + if f_opcode in (self.OPCODE_TEXT, self.OPCODE_BINARY): + # a new frame + if opcode: + raise ProtocolError("The opcode in non-fin frame is " + "expected to be zero, got " + "{0!r}".format(f_opcode)) + + # Start reading a new message, reset the validator + self.utf8validator.reset() + self.utf8validate_last = (True, True, 0, 0) + + opcode = f_opcode + + elif f_opcode == self.OPCODE_CONTINUATION: + if not opcode: + raise ProtocolError("Unexpected frame with opcode=0") + + elif f_opcode == self.OPCODE_PING: + self.handle_ping(header, payload) + continue + + elif f_opcode == self.OPCODE_PONG: + self.handle_pong(header, payload) + continue + + elif f_opcode == self.OPCODE_CLOSE: + self.handle_close(header, payload) + return + + else: + raise ProtocolError("Unexpected opcode={0!r}".format(f_opcode)) + + if opcode == self.OPCODE_TEXT: + self.validate_utf8(payload) + + message += payload + + if header.fin: + break + + if opcode == self.OPCODE_TEXT: + self.validate_utf8(message) + return message + else: + return bytearray(message) + + def receive(self): + """ + Read and return a message from the stream. If `None` is returned, then + the socket is considered closed/errored. 
+ """ + + if self.closed: + self.current_app.on_close(MSG_ALREADY_CLOSED) + raise WebSocketError(MSG_ALREADY_CLOSED) + + try: + return self.read_message() + except UnicodeError: + self.close(1007) + except ProtocolError: + self.close(1002) + except error: + self.close() + self.current_app.on_close(MSG_CLOSED) + + return None + + def send_frame(self, message, opcode): + """ + Send a frame over the websocket with message as its payload + """ + if self.closed: + self.current_app.on_close(MSG_ALREADY_CLOSED) + raise WebSocketError(MSG_ALREADY_CLOSED) + + if opcode == self.OPCODE_TEXT: + message = self._encode_bytes(message) + elif opcode == self.OPCODE_BINARY: + message = str(message) + + header = Header.encode_header(True, opcode, '', len(message), 0) + + try: + self.raw_write(header + message) + except error: + raise WebSocketError(MSG_SOCKET_DEAD) + + def send(self, message, binary=None): + """ + Send a frame over the websocket with message as its payload + """ + if binary is None: + binary = not isinstance(message, (str, unicode)) + + opcode = self.OPCODE_BINARY if binary else self.OPCODE_TEXT + + try: + self.send_frame(message, opcode) + except WebSocketError: + self.current_app.on_close(MSG_SOCKET_DEAD) + raise WebSocketError(MSG_SOCKET_DEAD) + + def close(self, code=1000, message=''): + """ + Close the websocket and connection, sending the specified code and + message. The underlying socket object is _not_ closed, that is the + responsibility of the initiator. + """ + + if self.closed: + self.current_app.on_close(MSG_ALREADY_CLOSED) + + try: + message = self._encode_bytes(message) + + self.send_frame( + struct.pack('!H%ds' % len(message), code, message), + opcode=self.OPCODE_CLOSE) + except WebSocketError: + # Failed to write the closing frame but it's ok because we're + # closing the socket anyway. 
+ self.logger.debug("Failed to write closing frame -> closing socket") + finally: + self.logger.debug("Closed WebSocket") + self.closed = True + + self.stream = None + self.raw_write = None + self.raw_read = None + + self.environ = None + + #self.current_app.on_close(MSG_ALREADY_CLOSED) + + +class Stream(object): + """ + Wraps the handler's socket/rfile attributes and makes it in to a file like + object that can be read from/written to by the lower level websocket api. + """ + + __slots__ = ('handler', 'read', 'write') + + def __init__(self, handler): + self.handler = handler + self.read = handler.rfile.read + self.write = handler.socket.sendall + + +class Header(object): + __slots__ = ('fin', 'mask', 'opcode', 'flags', 'length') + + FIN_MASK = 0x80 + OPCODE_MASK = 0x0f + MASK_MASK = 0x80 + LENGTH_MASK = 0x7f + + RSV0_MASK = 0x40 + RSV1_MASK = 0x20 + RSV2_MASK = 0x10 + + # bitwise mask that will determine the reserved bits for a frame header + HEADER_FLAG_MASK = RSV0_MASK | RSV1_MASK | RSV2_MASK + + def __init__(self, fin=0, opcode=0, flags=0, length=0): + self.mask = '' + self.fin = fin + self.opcode = opcode + self.flags = flags + self.length = length + + def mask_payload(self, payload): + payload = bytearray(payload) + mask = bytearray(self.mask) + + for i in xrange(self.length): + payload[i] ^= mask[i % 4] + + return str(payload) + + # it's the same operation + unmask_payload = mask_payload + + def __repr__(self): + return ("
    ").format(self.fin, self.opcode, self.length, + self.flags, id(self)) + + @classmethod + def decode_header(cls, stream): + """ + Decode a WebSocket header. + + :param stream: A file like object that can be 'read' from. + :returns: A `Header` instance. + """ + read = stream.read + data = read(2) + + if len(data) != 2: + raise WebSocketError("Unexpected EOF while decoding header") + + first_byte, second_byte = struct.unpack('!BB', data) + + header = cls( + fin=first_byte & cls.FIN_MASK == cls.FIN_MASK, + opcode=first_byte & cls.OPCODE_MASK, + flags=first_byte & cls.HEADER_FLAG_MASK, + length=second_byte & cls.LENGTH_MASK) + + has_mask = second_byte & cls.MASK_MASK == cls.MASK_MASK + + if header.opcode > 0x07: + if not header.fin: + raise ProtocolError( + "Received fragmented control frame: {0!r}".format(data)) + + # Control frames MUST have a payload length of 125 bytes or less + if header.length > 125: + raise FrameTooLargeException( + "Control frame cannot be larger than 125 bytes: " + "{0!r}".format(data)) + + if header.length == 126: + # 16 bit length + data = read(2) + + if len(data) != 2: + raise WebSocketError('Unexpected EOF while decoding header') + + header.length = struct.unpack('!H', data)[0] + elif header.length == 127: + # 64 bit length + data = read(8) + + if len(data) != 8: + raise WebSocketError('Unexpected EOF while decoding header') + + header.length = struct.unpack('!Q', data)[0] + + if has_mask: + mask = read(4) + + if len(mask) != 4: + raise WebSocketError('Unexpected EOF while decoding header') + + header.mask = mask + + return header + + @classmethod + def encode_header(cls, fin, opcode, mask, length, flags): + """ + Encodes a WebSocket header. + + :param fin: Whether this is the final frame for this opcode. + :param opcode: The opcode of the payload, see `OPCODE_*` + :param mask: Whether the payload is masked. + :param length: The length of the frame. + :param flags: The RSV* flags. + :return: A bytestring encoded header. 
+ """ + first_byte = opcode + second_byte = 0 + extra = '' + + if fin: + first_byte |= cls.FIN_MASK + + if flags & cls.RSV0_MASK: + first_byte |= cls.RSV0_MASK + + if flags & cls.RSV1_MASK: + first_byte |= cls.RSV1_MASK + + if flags & cls.RSV2_MASK: + first_byte |= cls.RSV2_MASK + + # now deal with length complexities + if length < 126: + second_byte += length + elif length <= 0xffff: + second_byte += 126 + extra = struct.pack('!H', length) + elif length <= 0xffffffffffffffff: + second_byte += 127 + extra = struct.pack('!Q', length) + else: + raise FrameTooLargeException + + if mask: + second_byte |= cls.MASK_MASK + + extra += mask + + return chr(first_byte) + chr(second_byte) + extra diff --git a/src/lib/libsecp256k1message/__init__.py b/src/lib/libsecp256k1message/__init__.py deleted file mode 100644 index 753f384e..00000000 --- a/src/lib/libsecp256k1message/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .libsecp256k1message import * \ No newline at end of file diff --git a/src/lib/libsecp256k1message/libsecp256k1message.py b/src/lib/libsecp256k1message/libsecp256k1message.py deleted file mode 100644 index 59768b88..00000000 --- a/src/lib/libsecp256k1message/libsecp256k1message.py +++ /dev/null @@ -1,162 +0,0 @@ -import hashlib -import base64 -from coincurve import PrivateKey, PublicKey -from base58 import b58encode_check, b58decode_check -from hmac import compare_digest -from util.Electrum import format as zero_format - -RECID_MIN = 0 -RECID_MAX = 3 -RECID_UNCOMPR = 27 -LEN_COMPACT_SIG = 65 - -class SignatureError(ValueError): - pass - -def bitcoin_address(): - """Generate a public address and a secret address.""" - publickey, secretkey = key_pair() - - public_address = compute_public_address(publickey) - secret_address = compute_secret_address(secretkey) - - return (public_address, secret_address) - -def key_pair(): - """Generate a public key and a secret key.""" - secretkey = PrivateKey() - publickey = PublicKey.from_secret(secretkey.secret) - return 
(publickey, secretkey) - -def compute_public_address(publickey, compressed=False): - """Convert a public key to a public Bitcoin address.""" - public_plain = b'\x00' + public_digest(publickey, compressed=compressed) - return b58encode_check(public_plain) - -def compute_secret_address(secretkey): - """Convert a secret key to a secret Bitcoin address.""" - secret_plain = b'\x80' + secretkey.secret - return b58encode_check(secret_plain) - -def public_digest(publickey, compressed=False): - """Convert a public key to ripemd160(sha256()) digest.""" - publickey_hex = publickey.format(compressed=compressed) - return hashlib.new('ripemd160', hashlib.sha256(publickey_hex).digest()).digest() - -def address_public_digest(address): - """Convert a public Bitcoin address to ripemd160(sha256()) digest.""" - public_plain = b58decode_check(address) - if not public_plain.startswith(b'\x00') or len(public_plain) != 21: - raise ValueError('Invalid public key digest') - return public_plain[1:] - -def _decode_bitcoin_secret(address): - secret_plain = b58decode_check(address) - if not secret_plain.startswith(b'\x80') or len(secret_plain) != 33: - raise ValueError('Invalid secret key. Uncompressed keys only.') - return secret_plain[1:] - -def recover_public_key(signature, message): - """Recover public key from signature and message. 
- Recovered public key guarantees a correct signature""" - return PublicKey.from_signature_and_message(signature, message) - -def decode_secret_key(address): - """Convert a secret Bitcoin address to a secret key.""" - return PrivateKey(_decode_bitcoin_secret(address)) - - -def coincurve_sig(electrum_signature): - # coincurve := r + s + recovery_id - # where (0 <= recovery_id <= 3) - # https://github.com/bitcoin-core/secp256k1/blob/0b7024185045a49a1a6a4c5615bf31c94f63d9c4/src/modules/recovery/main_impl.h#L35 - if len(electrum_signature) != LEN_COMPACT_SIG: - raise ValueError('Not a 65-byte compact signature.') - # Compute coincurve recid - recid = (electrum_signature[0] - 27) & 3 - if not (RECID_MIN <= recid <= RECID_MAX): - raise ValueError('Recovery ID %d is not supported.' % recid) - recid_byte = int.to_bytes(recid, length=1, byteorder='big') - return electrum_signature[1:] + recid_byte - - -def electrum_sig(coincurve_signature): - # electrum := recovery_id + r + s - # where (27 <= recovery_id <= 30) - # https://github.com/scintill/bitcoin-signature-tools/blob/ed3f5be5045af74a54c92d3648de98c329d9b4f7/key.cpp#L285 - if len(coincurve_signature) != LEN_COMPACT_SIG: - raise ValueError('Not a 65-byte compact signature.') - # Compute Electrum recid - recid = coincurve_signature[-1] + RECID_UNCOMPR - if not (RECID_UNCOMPR + RECID_MIN <= recid <= RECID_UNCOMPR + RECID_MAX): - raise ValueError('Recovery ID %d is not supported.' % recid) - recid_byte = int.to_bytes(recid, length=1, byteorder='big') - return recid_byte + coincurve_signature[0:-1] - -def sign_data(secretkey, byte_string): - """Sign [byte_string] with [secretkey]. 
- Return serialized signature compatible with Electrum (ZeroNet).""" - # encode the message - encoded = zero_format(byte_string) - # sign the message and get a coincurve signature - signature = secretkey.sign_recoverable(encoded) - # reserialize signature and return it - return electrum_sig(signature) - -def verify_data(key_digest, electrum_signature, byte_string): - """Verify if [electrum_signature] of [byte_string] is correctly signed and - is signed with the secret counterpart of [key_digest]. - Raise SignatureError if the signature is forged or otherwise problematic.""" - # reserialize signature - signature = coincurve_sig(electrum_signature) - # encode the message - encoded = zero_format(byte_string) - # recover full public key from signature - # "which guarantees a correct signature" - publickey = recover_public_key(signature, encoded) - - # verify that the message is correctly signed by the public key - # correct_sig = verify_sig(publickey, signature, encoded) - - # verify that the public key is what we expect - correct_key = verify_key(publickey, key_digest) - - if not correct_key: - raise SignatureError('Signature is forged!') - -def verify_sig(publickey, signature, byte_string): - return publickey.verify(signature, byte_string) - -def verify_key(publickey, key_digest): - return compare_digest(key_digest, public_digest(publickey)) - -def recover_address(data, sign): - sign_bytes = base64.b64decode(sign) - is_compressed = ((sign_bytes[0] - 27) & 4) != 0 - publickey = recover_public_key(coincurve_sig(sign_bytes), zero_format(data)) - return compute_public_address(publickey, compressed=is_compressed) - -__all__ = [ - 'SignatureError', - 'key_pair', 'compute_public_address', 'compute_secret_address', - 'public_digest', 'address_public_digest', 'recover_public_key', 'decode_secret_key', - 'sign_data', 'verify_data', "recover_address" -] - -if __name__ == "__main__": - import base64, time, multiprocessing - s = time.time() - privatekey = 
decode_secret_key(b"5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk") - threads = [] - for i in range(1000): - data = bytes("hello", "utf8") - address = recover_address(data, "HGbib2kv9gm9IJjDt1FXbXFczZi35u0rZR3iPUIt5GglDDCeIQ7v8eYXVNIaLoJRI4URGZrhwmsYQ9aVtRTnTfQ=") - print("- Verify x10000: %.3fs %s" % (time.time() - s, address)) - - s = time.time() - for i in range(1000): - privatekey = decode_secret_key(b"5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk") - sign = sign_data(privatekey, b"hello") - sign_b64 = base64.b64encode(sign) - - print("- Sign x1000: %.3fs" % (time.time() - s)) diff --git a/src/lib/opensslVerify/HashInfo.txt b/src/lib/opensslVerify/HashInfo.txt new file mode 100644 index 00000000..f5308e27 Binary files /dev/null and b/src/lib/opensslVerify/HashInfo.txt differ diff --git a/src/lib/opensslVerify/OpenSSL License.txt b/src/lib/opensslVerify/OpenSSL License.txt new file mode 100644 index 00000000..97234459 --- /dev/null +++ b/src/lib/opensslVerify/OpenSSL License.txt @@ -0,0 +1,126 @@ + + LICENSE ISSUES + ============== + + The OpenSSL toolkit stays under a dual license, i.e. both the conditions of + the OpenSSL License and the original SSLeay license apply to the toolkit. + See below for the actual license texts. Actually both licenses are BSD-style + Open Source licenses. In case of any license issues related to OpenSSL + please contact openssl-core@openssl.org. + + OpenSSL License + --------------- + +/* ==================================================================== + * Copyright (c) 1998-2011 The OpenSSL Project. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * 2. 
Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in + * the documentation and/or other materials provided with the + * distribution. + * + * 3. All advertising materials mentioning features or use of this + * software must display the following acknowledgment: + * "This product includes software developed by the OpenSSL Project + * for use in the OpenSSL Toolkit. (http://www.openssl.org/)" + * + * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to + * endorse or promote products derived from this software without + * prior written permission. For written permission, please contact + * openssl-core@openssl.org. + * + * 5. Products derived from this software may not be called "OpenSSL" + * nor may "OpenSSL" appear in their names without prior written + * permission of the OpenSSL Project. + * + * 6. Redistributions of any form whatsoever must retain the following + * acknowledgment: + * "This product includes software developed by the OpenSSL Project + * for use in the OpenSSL Toolkit (http://www.openssl.org/)" + * + * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY + * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR + * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + * OF THE POSSIBILITY OF SUCH DAMAGE. 
+ * ==================================================================== + * + * This product includes cryptographic software written by Eric Young + * (eay@cryptsoft.com). This product includes software written by Tim + * Hudson (tjh@cryptsoft.com). + * + */ + + Original SSLeay License + ----------------------- + +/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) + * All rights reserved. + * + * This package is an SSL implementation written + * by Eric Young (eay@cryptsoft.com). + * The implementation was written so as to conform with Netscapes SSL. + * + * This library is free for commercial and non-commercial use as long as + * the following conditions are aheared to. The following conditions + * apply to all code found in this distribution, be it the RC4, RSA, + * lhash, DES, etc., code; not just the SSL code. The SSL documentation + * included with this distribution is covered by the same copyright terms + * except that the holder is Tim Hudson (tjh@cryptsoft.com). + * + * Copyright remains Eric Young's, and as such any Copyright notices in + * the code are not to be removed. + * If this package is used in a product, Eric Young should be given attribution + * as the author of the parts of the library used. + * This can be in the form of a textual message at program startup or + * in documentation (online or textual) provided with the package. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. 
All advertising materials mentioning features or use of this software + * must display the following acknowledgement: + * "This product includes cryptographic software written by + * Eric Young (eay@cryptsoft.com)" + * The word 'cryptographic' can be left out if the rouines from the library + * being used are not cryptographic related :-). + * 4. If you include any Windows specific code (or a derivative thereof) from + * the apps directory (application code) you must include an acknowledgement: + * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)" + * + * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND + * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS + * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY + * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + * SUCH DAMAGE. + * + * The licence and distribution terms for any publically available version or + * derivative of this code cannot be changed. i.e. this code cannot simply be + * copied and put under another distribution licence + * [including the GNU Public Licence.] 
+ */ diff --git a/src/lib/opensslVerify/ReadMe.txt b/src/lib/opensslVerify/ReadMe.txt new file mode 100644 index 00000000..352ccef6 --- /dev/null +++ b/src/lib/opensslVerify/ReadMe.txt @@ -0,0 +1,59 @@ +============================================================================= +OpenSSL v1.0.2a Precompiled Binaries for Win32 +----------------------------------------------------------------------------- + + *** Release Information *** + +Release Date: Mrz 20, 2015 + +Author: Frederik A. Winkelsdorf (opendec.wordpress.com) + for the Indy Project (www.indyproject.org) + +Requirements: Indy 10.5.5+ (SVN Version or Delphi 2009 and newer) + +Dependencies: The libraries have no noteworthy dependencies + +Installation: Copy both DLL files into your application directory + +Supported OS: Windows 2000 up to Windows 8 + +----------------------------------------------------------------------------- + + *** Legal Disclaimer *** + +THIS SOFTWARE IS PROVIDED BY ITS AUTHOR AND THE INDY PROJECT "AS IS" AND ANY +EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +OpenSSL license terms are provided in the file "OpenSSL License.txt". + +PLEASE CHECK IF YOU NEED TO COMPLY WITH EXPORT RESTRICTIONS FOR CRYPTOGRAPHIC +SOFTWARE AND/OR PATENTS. 
+ +----------------------------------------------------------------------------- + + *** Build Information Win32 *** + +Built with: Microsoft Visual C++ 2008 Express Edition + The Netwide Assembler (NASM) v2.11.05 Win32 + Strawberry Perl v5.20.0.1 Win32 Portable + Windows PowerShell + FinalBuilder 7 Embarcadero Edition + +Commands: perl configure VC-WIN32 + ms\do_nasm + adjusted ms\ntdll.mak (replaced "/MD" with "/MT") + adjusted ms\version32.rc (Indy Information inserted) + nmake -f ms\ntdll.mak + nmake -f ms\ntdll.mak test + editbin.exe /rebase:base=0x11000000 libeay32.dll + editbin.exe /rebase:base=0x12000000 ssleay32.dll + +============================================================================= \ No newline at end of file diff --git a/src/lib/opensslVerify/__init__.py b/src/lib/opensslVerify/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/lib/opensslVerify/gencert.cmd b/src/lib/opensslVerify/gencert.cmd new file mode 100644 index 00000000..105ba6a0 --- /dev/null +++ b/src/lib/opensslVerify/gencert.cmd @@ -0,0 +1,10 @@ +openssl req -x509 -newkey rsa:2048 -keyout key.pem -out cert.pem -nodes -config openssl.cnf +REM openssl ecparam -name secp521r1 -genkey -param_enc explicit -out key-ecc.pem -config openssl.cnf + +openssl ecparam -name secp256r1 -genkey -out key-ecc.pem +openssl req -new -key key-ecc.pem -x509 -nodes -out cert-ecc.pem -config openssl.cnf + +@echo off +REM openssl ecparam -genkey -name prime256v1 -out key.pem +REM openssl req -new -key key.pem -out csr.pem +REM openssl req -x509 -days 365 -key key.pem -in csr.pem -out certificate.pem \ No newline at end of file diff --git a/src/lib/opensslVerify/libeay32.dll b/src/lib/opensslVerify/libeay32.dll new file mode 100644 index 00000000..6359cc5a Binary files /dev/null and b/src/lib/opensslVerify/libeay32.dll differ diff --git a/src/lib/openssl/openssl.cnf b/src/lib/opensslVerify/openssl.cnf similarity index 78% rename from src/lib/openssl/openssl.cnf rename to 
src/lib/opensslVerify/openssl.cnf index 1c1ec47f..a2c48ba1 100644 --- a/src/lib/openssl/openssl.cnf +++ b/src/lib/opensslVerify/openssl.cnf @@ -1,4 +1,5 @@ [ req ] +prompt = no default_bits = 2048 default_keyfile = server-key.pem distinguished_name = subject @@ -16,7 +17,7 @@ organizationName = Example, LLC # Use a friendly name here because its presented to the user. The server's DNS # names are placed in Subject Alternate Names. Plus, DNS names here is deprecated -# by both IETF and CA/Browser Forums. If you place a DNS name here, then you +# by both IETF and CA/Browser Forums. If you place a DNS name here, then you # must include the DNS name in the SAN too (otherwise, Chrome and others that # strictly follow the CA/Browser Baseline Requirements will fail). commonName = Example Company @@ -31,8 +32,8 @@ authorityKeyIdentifier = keyid,issuer basicConstraints = CA:FALSE keyUsage = digitalSignature, keyEncipherment -extendedKeyUsage = clientAuth, serverAuth subjectAltName = @alternate_names +nsComment = "OpenSSL Generated Certificate" # RFC 5280, Section 4.2.1.12 makes EKU optional # CA/Browser Baseline Requirements, Appendix (B)(3)(G) makes me confused @@ -45,8 +46,8 @@ subjectKeyIdentifier = hash basicConstraints = CA:FALSE keyUsage = digitalSignature, keyEncipherment -extendedKeyUsage = clientAuth, serverAuth subjectAltName = @alternate_names +nsComment = "OpenSSL Generated Certificate" # RFC 5280, Section 4.2.1.12 makes EKU optional # CA/Browser Baseline Requirements, Appendix (B)(3)(G) makes me confused @@ -54,5 +55,16 @@ subjectAltName = @alternate_names [ alternate_names ] -DNS.1 = $ENV::CN -DNS.2 = www.$ENV::CN \ No newline at end of file +DNS.1 = example.com +DNS.2 = www.example.com +DNS.3 = mail.example.com +DNS.4 = ftp.example.com + +# Add these if you need them. But usually you don't want them or +# need them in production. You may need them for development. 
+# DNS.5 = localhost +# DNS.6 = localhost.localdomain +# DNS.7 = 127.0.0.1 + +# IPv6 localhost +# DNS.8 = ::1 \ No newline at end of file diff --git a/src/lib/opensslVerify/openssl.exe b/src/lib/opensslVerify/openssl.exe new file mode 100644 index 00000000..1f5127e5 Binary files /dev/null and b/src/lib/opensslVerify/openssl.exe differ diff --git a/src/lib/opensslVerify/opensslVerify.py b/src/lib/opensslVerify/opensslVerify.py new file mode 100644 index 00000000..5294816f --- /dev/null +++ b/src/lib/opensslVerify/opensslVerify.py @@ -0,0 +1,458 @@ +# via http://pastebin.com/H1XikJFd +# -*- Mode: Python -*- + +# This is a combination of http://pastebin.com/bQtdDzHx and +# https://github.com/Bitmessage/PyBitmessage/blob/master/src/pyelliptic/openssl.py +# that doesn't crash on OSX. +# Long message bug fixed by ZeroNet + +import ctypes +import ctypes.util +import _ctypes +import hashlib +import base64 +import time +import logging +import sys +import os + +addrtype = 0 + + +class _OpenSSL: + + """ + Wrapper for OpenSSL using ctypes + """ + + def __init__(self, library): + self.time_opened = time.time() + """ + Build the wrapper + """ + try: + self._lib = ctypes.CDLL(library) + except: + self._lib = ctypes.cdll.LoadLibrary(library) + + self.pointer = ctypes.pointer + self.c_int = ctypes.c_int + self.byref = ctypes.byref + self.create_string_buffer = ctypes.create_string_buffer + + self.BN_new = self._lib.BN_new + self.BN_new.restype = ctypes.c_void_p + self.BN_new.argtypes = [] + + self.BN_copy = self._lib.BN_copy + self.BN_copy.restype = ctypes.c_void_p + self.BN_copy.argtypes = [ctypes.c_void_p, ctypes.c_void_p] + + self.BN_mul_word = self._lib.BN_mul_word + self.BN_mul_word.restype = ctypes.c_int + self.BN_mul_word.argtypes = [ctypes.c_void_p, ctypes.c_int] + + self.BN_set_word = self._lib.BN_set_word + self.BN_set_word.restype = ctypes.c_int + self.BN_set_word.argtypes = [ctypes.c_void_p, ctypes.c_int] + + self.BN_add = self._lib.BN_add + self.BN_add.restype = 
ctypes.c_void_p + self.BN_add.argtypes = [ctypes.c_void_p, ctypes.c_void_p, + ctypes.c_void_p] + + self.BN_mod_sub = self._lib.BN_mod_sub + self.BN_mod_sub.restype = ctypes.c_int + self.BN_mod_sub.argtypes = [ctypes.c_void_p, ctypes.c_void_p, + ctypes.c_void_p, + ctypes.c_void_p, + ctypes.c_void_p] + + self.BN_mod_mul = self._lib.BN_mod_mul + self.BN_mod_mul.restype = ctypes.c_int + self.BN_mod_mul.argtypes = [ctypes.c_void_p, ctypes.c_void_p, + ctypes.c_void_p, + ctypes.c_void_p, + ctypes.c_void_p] + + self.BN_mod_inverse = self._lib.BN_mod_inverse + self.BN_mod_inverse.restype = ctypes.c_void_p + self.BN_mod_inverse.argtypes = [ctypes.c_void_p, ctypes.c_void_p, + ctypes.c_void_p, + ctypes.c_void_p] + + self.BN_cmp = self._lib.BN_cmp + self.BN_cmp.restype = ctypes.c_int + self.BN_cmp.argtypes = [ctypes.c_void_p, ctypes.c_void_p] + + self.BN_bn2bin = self._lib.BN_bn2bin + self.BN_bn2bin.restype = ctypes.c_int + self.BN_bn2bin.argtypes = [ctypes.c_void_p, ctypes.c_void_p] + + self.BN_bin2bn = self._lib.BN_bin2bn + self.BN_bin2bn.restype = ctypes.c_void_p + self.BN_bin2bn.argtypes = [ctypes.c_void_p, ctypes.c_int, + ctypes.c_void_p] + + self.EC_KEY_new_by_curve_name = self._lib.EC_KEY_new_by_curve_name + self.EC_KEY_new_by_curve_name.restype = ctypes.c_void_p + self.EC_KEY_new_by_curve_name.argtypes = [ctypes.c_int] + + self.EC_KEY_get0_group = self._lib.EC_KEY_get0_group + self.EC_KEY_get0_group.restype = ctypes.c_void_p + self.EC_KEY_get0_group.argtypes = [ctypes.c_void_p] + + self.EC_KEY_set_private_key = self._lib.EC_KEY_set_private_key + self.EC_KEY_set_private_key.restype = ctypes.c_int + self.EC_KEY_set_private_key.argtypes = [ctypes.c_void_p, + ctypes.c_void_p] + + self.EC_KEY_set_public_key = self._lib.EC_KEY_set_public_key + self.EC_KEY_set_public_key.restype = ctypes.c_int + self.EC_KEY_set_public_key.argtypes = [ctypes.c_void_p, + ctypes.c_void_p] + + self.EC_POINT_set_compressed_coordinates_GFp = self._lib.EC_POINT_set_compressed_coordinates_GFp + 
self.EC_POINT_set_compressed_coordinates_GFp.restype = ctypes.c_int + self.EC_POINT_set_compressed_coordinates_GFp.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p] + + self.EC_POINT_new = self._lib.EC_POINT_new + self.EC_POINT_new.restype = ctypes.c_void_p + self.EC_POINT_new.argtypes = [ctypes.c_void_p] + + self.EC_POINT_free = self._lib.EC_POINT_free + self.EC_POINT_free.restype = None + self.EC_POINT_free.argtypes = [ctypes.c_void_p] + + self.EC_GROUP_get_order = self._lib.EC_GROUP_get_order + self.EC_GROUP_get_order.restype = ctypes.c_void_p + self.EC_GROUP_get_order.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p] + + self.EC_GROUP_get_degree = self._lib.EC_GROUP_get_degree + self.EC_GROUP_get_degree.restype = ctypes.c_void_p + self.EC_GROUP_get_degree.argtypes = [ctypes.c_void_p] + + self.EC_GROUP_get_curve_GFp = self._lib.EC_GROUP_get_curve_GFp + self.EC_GROUP_get_curve_GFp.restype = ctypes.c_void_p + self.EC_GROUP_get_curve_GFp.argtypes = [ctypes.c_void_p, + ctypes.c_void_p, + ctypes.c_void_p, + ctypes.c_void_p, + ctypes.c_void_p] + + self.EC_POINT_mul = self._lib.EC_POINT_mul + self.EC_POINT_mul.restype = ctypes.c_int + self.EC_POINT_mul.argtypes = [ctypes.c_void_p, ctypes.c_void_p, + ctypes.c_void_p, ctypes.c_void_p, + ctypes.c_void_p, ctypes.c_void_p] + + self.EC_KEY_set_private_key = self._lib.EC_KEY_set_private_key + self.EC_KEY_set_private_key.restype = ctypes.c_int + self.EC_KEY_set_private_key.argtypes = [ctypes.c_void_p, + ctypes.c_void_p] + + self.EC_KEY_set_conv_form = self._lib.EC_KEY_set_conv_form + self.EC_KEY_set_conv_form.restype = None + self.EC_KEY_set_conv_form.argtypes = [ctypes.c_void_p, + ctypes.c_int] + + self.BN_CTX_new = self._lib.BN_CTX_new + self._lib.BN_CTX_new.restype = ctypes.c_void_p + self._lib.BN_CTX_new.argtypes = [] + + self.BN_CTX_start = self._lib.BN_CTX_start + self._lib.BN_CTX_start.restype = ctypes.c_void_p + self._lib.BN_CTX_start.argtypes = [ctypes.c_void_p] 
+ + self.BN_CTX_get = self._lib.BN_CTX_get + self._lib.BN_CTX_get.restype = ctypes.c_void_p + self._lib.BN_CTX_get.argtypes = [ctypes.c_void_p] + + self.ECDSA_sign = self._lib.ECDSA_sign + self.ECDSA_sign.restype = ctypes.c_int + self.ECDSA_sign.argtypes = [ctypes.c_int, ctypes.c_void_p, + ctypes.c_int, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p] + + self.ECDSA_verify = self._lib.ECDSA_verify + self.ECDSA_verify.restype = ctypes.c_int + self.ECDSA_verify.argtypes = [ctypes.c_int, ctypes.c_void_p, + ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p] + + self.i2o_ECPublicKey = self._lib.i2o_ECPublicKey + self.i2o_ECPublicKey.restype = ctypes.c_void_p + self.i2o_ECPublicKey.argtypes = [ctypes.c_void_p, ctypes.c_void_p] + + self.BN_CTX_free = self._lib.BN_CTX_free + self.BN_CTX_free.restype = None + self.BN_CTX_free.argtypes = [ctypes.c_void_p] + + self.EC_POINT_free = self._lib.EC_POINT_free + self.EC_POINT_free.restype = None + self.EC_POINT_free.argtypes = [ctypes.c_void_p] + +ssl = None + +def openLibrary(): + global ssl + try: + if sys.platform.startswith("win"): + dll_path = "src/lib/opensslVerify/libeay32.dll" + elif sys.platform == "cygwin": + dll_path = "/bin/cygcrypto-1.0.0.dll" + else: + dll_path = "/usr/local/ssl/lib/libcrypto.so" + ssl = _OpenSSL(dll_path) + assert ssl + except Exception, err: + ssl = _OpenSSL(ctypes.util.find_library('ssl') or ctypes.util.find_library('crypto') or ctypes.util.find_library('libcrypto') or 'libeay32') + +openLibrary() +openssl_version = "%.9X" % ssl._lib.SSLeay() + +NID_secp256k1 = 714 + + +def check_result(val, func, args): + if val == 0: + raise ValueError + else: + return ctypes.c_void_p(val) + +ssl.EC_KEY_new_by_curve_name.restype = ctypes.c_void_p +ssl.EC_KEY_new_by_curve_name.errcheck = check_result + +POINT_CONVERSION_COMPRESSED = 2 +POINT_CONVERSION_UNCOMPRESSED = 4 + +__b58chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz' +__b58base = len(__b58chars) + + +def b58encode(v): + """ 
encode v, which is a string of bytes, to base58. + """ + + long_value = 0L + for (i, c) in enumerate(v[::-1]): + long_value += (256 ** i) * ord(c) + + result = '' + while long_value >= __b58base: + div, mod = divmod(long_value, __b58base) + result = __b58chars[mod] + result + long_value = div + result = __b58chars[long_value] + result + + # Bitcoin does a little leading-zero-compression: + # leading 0-bytes in the input become leading-1s + nPad = 0 + for c in v: + if c == '\0': + nPad += 1 + else: + break + + return (__b58chars[0] * nPad) + result + + +def hash_160(public_key): + md = hashlib.new('ripemd160') + md.update(hashlib.sha256(public_key).digest()) + return md.digest() + + +def hash_160_to_bc_address(h160): + vh160 = chr(addrtype) + h160 + h = Hash(vh160) + addr = vh160 + h[0:4] + return b58encode(addr) + + +def public_key_to_bc_address(public_key): + h160 = hash_160(public_key) + return hash_160_to_bc_address(h160) + + +def encode(val, base, minlen=0): + base, minlen = int(base), int(minlen) + code_string = ''.join([chr(x) for x in range(256)]) + result = "" + while val > 0: + result = code_string[val % base] + result + val //= base + return code_string[0] * max(minlen - len(result), 0) + result + + +def num_to_var_int(x): + x = int(x) + if x < 253: + return chr(x) + elif x < 65536: + return chr(253) + encode(x, 256, 2)[::-1] + elif x < 4294967296: + return chr(254) + encode(x, 256, 4)[::-1] + else: + return chr(255) + encode(x, 256, 8)[::-1] + + +def msg_magic(message): + return "\x18Bitcoin Signed Message:\n" + num_to_var_int(len(message)) + message + + +def get_address(eckey): + size = ssl.i2o_ECPublicKey(eckey, 0) + mb = ctypes.create_string_buffer(size) + ssl.i2o_ECPublicKey(eckey, ctypes.byref(ctypes.pointer(mb))) + return public_key_to_bc_address(mb.raw) + + +def Hash(data): + return hashlib.sha256(hashlib.sha256(data).digest()).digest() + + +def bx(bn, size=32): + b = ctypes.create_string_buffer(size) + ssl.BN_bn2bin(bn, b) + return 
b.raw.encode('hex') + + +def verify_message(address, signature, message): + pkey = ssl.EC_KEY_new_by_curve_name(NID_secp256k1) + eckey = SetCompactSignature(pkey, Hash(msg_magic(message)), signature) + addr = get_address(eckey) + return (address == addr) + + +def SetCompactSignature(pkey, hash, signature): + sig = base64.b64decode(signature) + if len(sig) != 65: + raise BaseException("Wrong encoding") + nV = ord(sig[0]) + if nV < 27 or nV >= 35: + return False + if nV >= 31: + ssl.EC_KEY_set_conv_form(pkey, POINT_CONVERSION_COMPRESSED) + nV -= 4 + r = ssl.BN_bin2bn(sig[1:33], 32, None) + s = ssl.BN_bin2bn(sig[33:], 32, None) + eckey = ECDSA_SIG_recover_key_GFp(pkey, r, s, hash, len(hash), nV - 27, + False) + return eckey + + +def ECDSA_SIG_recover_key_GFp(eckey, r, s, msg, msglen, recid, check): + n = 0 + i = recid / 2 + ctx = R = O = Q = None + + try: + group = ssl.EC_KEY_get0_group(eckey) + ctx = ssl.BN_CTX_new() + ssl.BN_CTX_start(ctx) + order = ssl.BN_CTX_get(ctx) + ssl.EC_GROUP_get_order(group, order, ctx) + x = ssl.BN_CTX_get(ctx) + ssl.BN_copy(x, order) + ssl.BN_mul_word(x, i) + ssl.BN_add(x, x, r) + field = ssl.BN_CTX_get(ctx) + ssl.EC_GROUP_get_curve_GFp(group, field, None, None, ctx) + + if (ssl.BN_cmp(x, field) >= 0): + return False + + R = ssl.EC_POINT_new(group) + ssl.EC_POINT_set_compressed_coordinates_GFp(group, R, x, recid % 2, ctx) + + if check: + O = ssl.EC_POINT_new(group) + ssl.EC_POINT_mul(group, O, None, R, order, ctx) + if ssl.EC_POINT_is_at_infinity(group, O): + return False + + Q = ssl.EC_POINT_new(group) + n = ssl.EC_GROUP_get_degree(group) + e = ssl.BN_CTX_get(ctx) + ssl.BN_bin2bn(msg, msglen, e) + if 8 * msglen > n: + ssl.BN_rshift(e, e, 8 - (n & 7)) + + zero = ssl.BN_CTX_get(ctx) + ssl.BN_set_word(zero, 0) + ssl.BN_mod_sub(e, zero, e, order, ctx) + rr = ssl.BN_CTX_get(ctx) + ssl.BN_mod_inverse(rr, r, order, ctx) + sor = ssl.BN_CTX_get(ctx) + ssl.BN_mod_mul(sor, s, rr, order, ctx) + eor = ssl.BN_CTX_get(ctx) + ssl.BN_mod_mul(eor, e, rr, 
order, ctx) + ssl.EC_POINT_mul(group, Q, eor, R, sor, ctx) + ssl.EC_KEY_set_public_key(eckey, Q) + return eckey + finally: + if ctx: + ssl.BN_CTX_free(ctx) + if R: + ssl.EC_POINT_free(R) + if O: + ssl.EC_POINT_free(O) + if Q: + ssl.EC_POINT_free(Q) + + +def closeLibrary(): + handle = ssl._lib._handle + if "FreeLibrary" in dir(_ctypes): + _ctypes.FreeLibrary(handle) + _ctypes.FreeLibrary(handle) + print "OpenSSL closed, handle:", handle + else: + _ctypes.dlclose(handle) + _ctypes.dlclose(handle) + print "OpenSSL dlclosed, handle:", handle + + +def getMessagePubkey(message, sig): + pkey = ssl.EC_KEY_new_by_curve_name(NID_secp256k1) + if type(pkey) is not int and not pkey.value: + raise Exception( + "OpenSSL %s (%s) EC_KEY_new_by_curve_name failed: %s, probably your OpenSSL lib does not support secp256k1 elliptic curve. Please check: https://github.com/HelloZeroNet/ZeroNet/issues/132" % + (openssl_version, ssl._lib._name, pkey.value) + ) + eckey = SetCompactSignature(pkey, Hash(msg_magic(message)), sig) + size = ssl.i2o_ECPublicKey(eckey, 0) + mb = ctypes.create_string_buffer(size) + ssl.i2o_ECPublicKey(eckey, ctypes.byref(ctypes.pointer(mb))) + pub = mb.raw + """ + if time.time() - ssl.time_opened > 60 * 5: # Reopen every 5 min + logging.debug("Reopening OpenSSL...") + closeLibrary() + openLibrary() + """ + return pub + + +def test(): + sign = "HGbib2kv9gm9IJjDt1FXbXFczZi35u0rZR3iPUIt5GglDDCeIQ7v8eYXVNIaLoJRI4URGZrhwmsYQ9aVtRTnTfQ=" + pubkey = "044827c756561b8ef6b28b5e53a000805adbf4938ab82e1c2b7f7ea16a0d6face9a509a0a13e794d742210b00581f3e249ebcc705240af2540ea19591091ac1d41" + assert getMessagePubkey("hello", sign).encode("hex") == pubkey + +test() # Make sure it working right + +if __name__ == "__main__": + import time + import os + import sys + sys.path.append("..") + from pybitcointools import bitcoin as btctools + print "OpenSSL version %s" % openssl_version + priv = "5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk" + address = 
"1N2XWu5soeppX2qUjvrf81rpdbShKJrjTr" + sign = btctools.ecdsa_sign("hello", priv) # HGbib2kv9gm9IJjDt1FXbXFczZi35u0rZR3iPUIt5GglDDCeIQ7v8eYXVNIaLoJRI4URGZrhwmsYQ9aVtRTnTfQ= + + s = time.time() + for i in range(100): + pubkey = getMessagePubkey("hello", sign) + verified = btctools.pubkey_to_address(pubkey) == address + print "100x Verified", verified, time.time() - s diff --git a/src/lib/opensslVerify/ssleay32.dll b/src/lib/opensslVerify/ssleay32.dll new file mode 100644 index 00000000..b8b86115 Binary files /dev/null and b/src/lib/opensslVerify/ssleay32.dll differ diff --git a/src/lib/pyaes/README.md b/src/lib/pyaes/README.md deleted file mode 100644 index 26e3b2ba..00000000 --- a/src/lib/pyaes/README.md +++ /dev/null @@ -1,363 +0,0 @@ -pyaes -===== - -A pure-Python implementation of the AES block cipher algorithm and the common modes of operation (CBC, CFB, CTR, ECB and OFB). - - -Features --------- - -* Supports all AES key sizes -* Supports all AES common modes -* Pure-Python (no external dependencies) -* BlockFeeder API allows streams to easily be encrypted and decrypted -* Python 2.x and 3.x support (make sure you pass in bytes(), not strings for Python 3) - - -API ---- - -All keys may be 128 bits (16 bytes), 192 bits (24 bytes) or 256 bits (32 bytes) long. - -To generate a random key use: -```python -import os - -# 128 bit, 192 bit and 256 bit keys -key_128 = os.urandom(16) -key_192 = os.urandom(24) -key_256 = os.urandom(32) -``` - -To generate keys from simple-to-remember passwords, consider using a _password-based key-derivation function_ such as [scrypt](https://github.com/ricmoo/pyscrypt). - - -### Common Modes of Operation - -There are many modes of operations, each with various pros and cons. In general though, the **CBC** and **CTR** modes are recommended. The **ECB is NOT recommended.**, and is included primarily for completeness. 
- -Each of the following examples assumes the following key: -```python -import pyaes - -# A 256 bit (32 byte) key -key = "This_key_for_demo_purposes_only!" - -# For some modes of operation we need a random initialization vector -# of 16 bytes -iv = "InitializationVe" -``` - - -#### Counter Mode of Operation (recommended) - -```python -aes = pyaes.AESModeOfOperationCTR(key) -plaintext = "Text may be any length you wish, no padding is required" -ciphertext = aes.encrypt(plaintext) - -# '''\xb6\x99\x10=\xa4\x96\x88\xd1\x89\x1co\xe6\x1d\xef;\x11\x03\xe3\xee -# \xa9V?wY\xbfe\xcdO\xe3\xdf\x9dV\x19\xe5\x8dk\x9fh\xb87>\xdb\xa3\xd6 -# \x86\xf4\xbd\xb0\x97\xf1\t\x02\xe9 \xed''' -print repr(ciphertext) - -# The counter mode of operation maintains state, so decryption requires -# a new instance be created -aes = pyaes.AESModeOfOperationCTR(key) -decrypted = aes.decrypt(ciphertext) - -# True -print decrypted == plaintext - -# To use a custom initial value -counter = pyaes.Counter(initial_value = 100) -aes = pyaes.AESModeOfOperationCTR(key, counter = counter) -ciphertext = aes.encrypt(plaintext) - -# '''WZ\x844\x02\xbfoY\x1f\x12\xa6\xce\x03\x82Ei)\xf6\x97mX\x86\xe3\x9d -# _1\xdd\xbd\x87\xb5\xccEM_4\x01$\xa6\x81\x0b\xd5\x04\xd7Al\x07\xe5 -# \xb2\x0e\\\x0f\x00\x13,\x07''' -print repr(ciphertext) -``` - - -#### Cipher-Block Chaining (recommended) - -```python -aes = pyaes.AESModeOfOperationCBC(key, iv = iv) -plaintext = "TextMustBe16Byte" -ciphertext = aes.encrypt(plaintext) - -# '\xd6:\x18\xe6\xb1\xb3\xc3\xdc\x87\xdf\xa7|\x08{k\xb6' -print repr(ciphertext) - - -# The cipher-block chaining mode of operation maintains state, so -# decryption requires a new instance be created -aes = pyaes.AESModeOfOperationCBC(key, iv = iv) -decrypted = aes.decrypt(ciphertext) - -# True -print decrypted == plaintext -``` - - -#### Cipher Feedback - -```python -# Each block into the mode of operation must be a multiple of the segment -# size. For this example we choose 8 bytes. 
-aes = pyaes.AESModeOfOperationCFB(key, iv = iv, segment_size = 8) -plaintext = "TextMustBeAMultipleOfSegmentSize" -ciphertext = aes.encrypt(plaintext) - -# '''v\xa9\xc1w"\x8aL\x93\xcb\xdf\xa0/\xf8Y\x0b\x8d\x88i\xcb\x85rmp -# \x85\xfe\xafM\x0c)\xd5\xeb\xaf''' -print repr(ciphertext) - - -# The cipher-block chaining mode of operation maintains state, so -# decryption requires a new instance be created -aes = pyaes.AESModeOfOperationCFB(key, iv = iv, segment_size = 8) -decrypted = aes.decrypt(ciphertext) - -# True -print decrypted == plaintext -``` - - -#### Output Feedback Mode of Operation - -```python -aes = pyaes.AESModeOfOperationOFB(key, iv = iv) -plaintext = "Text may be any length you wish, no padding is required" -ciphertext = aes.encrypt(plaintext) - -# '''v\xa9\xc1wO\x92^\x9e\rR\x1e\xf7\xb1\xa2\x9d"l1\xc7\xe7\x9d\x87(\xc26s -# \xdd8\xc8@\xb6\xd9!\xf5\x0cM\xaa\x9b\xc4\xedLD\xe4\xb9\xd8\xdf\x9e\xac -# \xa1\xb8\xea\x0f\x8ev\xb5''' -print repr(ciphertext) - -# The counter mode of operation maintains state, so decryption requires -# a new instance be created -aes = pyaes.AESModeOfOperationOFB(key, iv = iv) -decrypted = aes.decrypt(ciphertext) - -# True -print decrypted == plaintext -``` - - -#### Electronic Codebook (NOT recommended) - -```python -aes = pyaes.AESModeOfOperationECB(key) -plaintext = "TextMustBe16Byte" -ciphertext = aes.encrypt(plaintext) - -# 'L6\x95\x85\xe4\xd9\xf1\x8a\xfb\xe5\x94X\x80|\x19\xc3' -print repr(ciphertext) - -# Since there is no state stored in this mode of operation, it -# is not necessary to create a new aes object for decryption. -#aes = pyaes.AESModeOfOperationECB(key) -decrypted = aes.decrypt(ciphertext) - -# True -print decrypted == plaintext -``` - - -### BlockFeeder - -Since most of the modes of operations require data in specific block-sized or segment-sized blocks, it can be difficult when working with large arbitrary streams or strings of data. 
- -The BlockFeeder class is meant to make life easier for you, by buffering bytes across multiple calls and returning bytes as they are available, as well as padding or stripping the output when finished, if necessary. - -```python -import pyaes - -# Any mode of operation can be used; for this example CBC -key = "This_key_for_demo_purposes_only!" -iv = "InitializationVe" - -ciphertext = '' - -# We can encrypt one line at a time, regardles of length -encrypter = pyaes.Encrypter(pyaes.AESModeOfOperationCBC(key, iv)) -for line in file('/etc/passwd'): - ciphertext += encrypter.feed(line) - -# Make a final call to flush any remaining bytes and add paddin -ciphertext += encrypter.feed() - -# We can decrypt the cipher text in chunks (here we split it in half) -decrypter = pyaes.Decrypter(pyaes.AESModeOfOperationCBC(key, iv)) -decrypted = decrypter.feed(ciphertext[:len(ciphertext) / 2]) -decrypted += decrypter.feed(ciphertext[len(ciphertext) / 2:]) - -# Again, make a final call to flush any remaining bytes and strip padding -decrypted += decrypter.feed() - -print file('/etc/passwd').read() == decrypted -``` - -### Stream Feeder - -This is meant to make it even easier to encrypt and decrypt streams and large files. - -```python -import pyaes - -# Any mode of operation can be used; for this example CTR -key = "This_key_for_demo_purposes_only!" - -# Create the mode of operation to encrypt with -mode = pyaes.AESModeOfOperationCTR(key) - -# The input and output files -file_in = file('/etc/passwd') -file_out = file('/tmp/encrypted.bin', 'wb') - -# Encrypt the data as a stream, the file is read in 8kb chunks, be default -pyaes.encrypt_stream(mode, file_in, file_out) - -# Close the files -file_in.close() -file_out.close() -``` - -Decrypting is identical, except you would use `pyaes.decrypt_stream`, and the encrypted file would be the `file_in` and target for decryption the `file_out`. - -### AES block cipher - -Generally you should use one of the modes of operation above. 
This may however be useful for experimenting with a custom mode of operation or dealing with encrypted blocks. - -The block cipher requires exactly one block of data to encrypt or decrypt, and each block should be an array with each element an integer representation of a byte. - -```python -import pyaes - -# 16 byte block of plain text -plaintext = "Hello World!!!!!" -plaintext_bytes = [ ord(c) for c in plaintext ] - -# 32 byte key (256 bit) -key = "This_key_for_demo_purposes_only!" - -# Our AES instance -aes = pyaes.AES(key) - -# Encrypt! -ciphertext = aes.encrypt(plaintext_bytes) - -# [55, 250, 182, 25, 185, 208, 186, 95, 206, 115, 50, 115, 108, 58, 174, 115] -print repr(ciphertext) - -# Decrypt! -decrypted = aes.decrypt(ciphertext) - -# True -print decrypted == plaintext_bytes -``` - -What is a key? --------------- - -This seems to be a point of confusion for many people new to using encryption. You can think of the key as the *"password"*. However, these algorithms require the *"password"* to be a specific length. - -With AES, there are three possible key lengths, 16-bytes, 24-bytes or 32-bytes. When you create an AES object, the key size is automatically detected, so it is important to pass in a key of the correct length. - -Often, you wish to provide a password of arbitrary length, for example, something easy to remember or write down. In these cases, you must come up with a way to transform the password into a key, of a specific length. A **Password-Based Key Derivation Function** (PBKDF) is an algorithm designed for this exact purpose. - -Here is an example, using the popular (possibly obsolete?) 
*crypt* PBKDF: - -``` -# See: https://www.dlitz.net/software/python-pbkdf2/ -import pbkdf2 - -password = "HelloWorld" - -# The crypt PBKDF returns a 48-byte string -key = pbkdf2.crypt(password) - -# A 16-byte, 24-byte and 32-byte key, respectively -key_16 = key[:16] -key_24 = key[:24] -key_32 = key[:32] -``` - -The [scrypt](https://github.com/ricmoo/pyscrypt) PBKDF is intentionally slow, to make it more difficult to brute-force guess a password: - -``` -# See: https://github.com/ricmoo/pyscrypt -import pyscrypt - -password = "HelloWorld" - -# Salt is required, and prevents Rainbow Table attacks -salt = "SeaSalt" - -# N, r, and p are parameters to specify how difficult it should be to -# generate a key; bigger numbers take longer and more memory -N = 1024 -r = 1 -p = 1 - -# A 16-byte, 24-byte and 32-byte key, respectively; the scrypt algorithm takes -# a 6-th parameter, indicating key length -key_16 = pyscrypt.hash(password, salt, N, r, p, 16) -key_24 = pyscrypt.hash(password, salt, N, r, p, 24) -key_32 = pyscrypt.hash(password, salt, N, r, p, 32) -``` - -Another possibility, is to use a hashing function, such as SHA256 to hash the password, but this method may be vulnerable to [Rainbow Attacks](http://en.wikipedia.org/wiki/Rainbow_table), unless you use a [salt](http://en.wikipedia.org/wiki/Salt_(cryptography)). - -```python -import hashlib - -password = "HelloWorld" - -# The SHA256 hash algorithm returns a 32-byte string -hashed = hashlib.sha256(password).digest() - -# A 16-byte, 24-byte and 32-byte key, respectively -key_16 = hashed[:16] -key_24 = hashed[:24] -key_32 = hashed -``` - - - - -Performance ------------ - -There is a test case provided in _/tests/test-aes.py_ which does some basic performance testing (its primary purpose is moreso as a regression test). - -Based on that test, in **CPython**, this library is about 30x slower than [PyCrypto](https://www.dlitz.net/software/pycrypto/) for CBC, ECB and OFB; about 80x slower for CFB; and 300x slower for CTR. 
- -Based on that same test, in **Pypy**, this library is about 4x slower than [PyCrypto](https://www.dlitz.net/software/pycrypto/) for CBC, ECB and OFB; about 12x slower for CFB; and 19x slower for CTR. - -The PyCrypto documentation makes reference to the counter call being responsible for the speed problems of the counter (CTR) mode of operation, which is why they use a specially optimized counter. I will investigate this problem further in the future. - - -FAQ ---- - -#### Why do this? - -The short answer, *why not?* - -The longer answer, is for my [pyscrypt](https://github.com/ricmoo/pyscrypt) library. I required a pure-Python AES implementation that supported 256-bit keys with the counter (CTR) mode of operation. After searching, I found several implementations, but all were missing CTR or only supported 128 bit keys. After all the work of learning AES inside and out to implement the library, it was only a marginal amount of extra work to library-ify a more general solution. So, *why not?* - -#### How do I get a question I have added? - -E-mail me at pyaes@ricmoo.com with any questions, suggestions, comments, et cetera. - - -#### Can I give you my money? - -Umm... Ok? 
:-) - -_Bitcoin_ - `18UDs4qV1shu2CgTS2tKojhCtM69kpnWg9` diff --git a/src/lib/pyaes/__init__.py b/src/lib/pyaes/__init__.py deleted file mode 100644 index 5712f794..00000000 --- a/src/lib/pyaes/__init__.py +++ /dev/null @@ -1,53 +0,0 @@ -# The MIT License (MIT) -# -# Copyright (c) 2014 Richard Moore -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. - -# This is a pure-Python implementation of the AES algorithm and AES common -# modes of operation. - -# See: https://en.wikipedia.org/wiki/Advanced_Encryption_Standard -# See: https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation - - -# Supported key sizes: -# 128-bit -# 192-bit -# 256-bit - - -# Supported modes of operation: -# ECB - Electronic Codebook -# CBC - Cipher-Block Chaining -# CFB - Cipher Feedback -# OFB - Output Feedback -# CTR - Counter - -# See the README.md for API details and general information. 
- -# Also useful, PyCrypto, a crypto library implemented in C with Python bindings: -# https://www.dlitz.net/software/pycrypto/ - - -VERSION = [1, 3, 0] - -from .aes import AES, AESModeOfOperationCTR, AESModeOfOperationCBC, AESModeOfOperationCFB, AESModeOfOperationECB, AESModeOfOperationOFB, AESModesOfOperation, Counter -from .blockfeeder import decrypt_stream, Decrypter, encrypt_stream, Encrypter -from .blockfeeder import PADDING_NONE, PADDING_DEFAULT diff --git a/src/lib/pyaes/aes.py b/src/lib/pyaes/aes.py deleted file mode 100644 index c6e8bc02..00000000 --- a/src/lib/pyaes/aes.py +++ /dev/null @@ -1,589 +0,0 @@ -# The MIT License (MIT) -# -# Copyright (c) 2014 Richard Moore -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. - -# This is a pure-Python implementation of the AES algorithm and AES common -# modes of operation. 
- -# See: https://en.wikipedia.org/wiki/Advanced_Encryption_Standard - -# Honestly, the best description of the modes of operations are the wonderful -# diagrams on Wikipedia. They explain in moments what my words could never -# achieve. Hence the inline documentation here is sparer than I'd prefer. -# See: https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation - -# Also useful, PyCrypto, a crypto library implemented in C with Python bindings: -# https://www.dlitz.net/software/pycrypto/ - - -# Supported key sizes: -# 128-bit -# 192-bit -# 256-bit - - -# Supported modes of operation: -# ECB - Electronic Codebook -# CBC - Cipher-Block Chaining -# CFB - Cipher Feedback -# OFB - Output Feedback -# CTR - Counter - - -# See the README.md for API details and general information. - - -import copy -import struct - -__all__ = ["AES", "AESModeOfOperationCTR", "AESModeOfOperationCBC", "AESModeOfOperationCFB", - "AESModeOfOperationECB", "AESModeOfOperationOFB", "AESModesOfOperation", "Counter"] - - -def _compact_word(word): - return (word[0] << 24) | (word[1] << 16) | (word[2] << 8) | word[3] - -def _string_to_bytes(text): - return list(ord(c) for c in text) - -def _bytes_to_string(binary): - return "".join(chr(b) for b in binary) - -def _concat_list(a, b): - return a + b - - -# Python 3 compatibility -try: - xrange -except Exception: - xrange = range - - # Python 3 supports bytes, which is already an array of integers - def _string_to_bytes(text): - if isinstance(text, bytes): - return text - return [ord(c) for c in text] - - # In Python 3, we return bytes - def _bytes_to_string(binary): - return bytes(binary) - - # Python 3 cannot concatenate a list onto a bytes, so we bytes-ify it first - def _concat_list(a, b): - return a + bytes(b) - - -# Based *largely* on the Rijndael implementation -# See: http://csrc.nist.gov/publications/fips/fips197/fips-197.pdf -class AES(object): - '''Encapsulates the AES block cipher. - - You generally should not need this. 
Use the AESModeOfOperation classes - below instead.''' - - # Number of rounds by keysize - number_of_rounds = {16: 10, 24: 12, 32: 14} - - # Round constant words - rcon = [ 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d, 0x9a, 0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef, 0xc5, 0x91 ] - - # S-box and Inverse S-box (S is for Substitution) - S = [ 0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5, 0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76, 0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0, 0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0, 0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc, 0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15, 0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a, 0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75, 0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0, 0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84, 0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b, 0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf, 0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85, 0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8, 0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5, 0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2, 0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17, 0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73, 0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88, 0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb, 0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c, 0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79, 0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9, 0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08, 0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6, 0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a, 0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e, 0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e, 0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94, 0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf, 0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68, 0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16 ] - Si =[ 0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 
0xa5, 0x38, 0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb, 0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87, 0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb, 0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d, 0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e, 0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2, 0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25, 0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16, 0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92, 0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda, 0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84, 0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a, 0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06, 0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02, 0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b, 0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea, 0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73, 0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85, 0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e, 0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89, 0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b, 0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20, 0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4, 0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31, 0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f, 0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d, 0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef, 0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0, 0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61, 0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26, 0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d ] - - # Transformations for encryption - T1 = [ 0xc66363a5, 0xf87c7c84, 0xee777799, 0xf67b7b8d, 0xfff2f20d, 0xd66b6bbd, 0xde6f6fb1, 0x91c5c554, 0x60303050, 0x02010103, 0xce6767a9, 0x562b2b7d, 0xe7fefe19, 0xb5d7d762, 0x4dababe6, 0xec76769a, 0x8fcaca45, 0x1f82829d, 0x89c9c940, 0xfa7d7d87, 0xeffafa15, 0xb25959eb, 0x8e4747c9, 0xfbf0f00b, 0x41adadec, 0xb3d4d467, 0x5fa2a2fd, 0x45afafea, 0x239c9cbf, 0x53a4a4f7, 0xe4727296, 0x9bc0c05b, 0x75b7b7c2, 0xe1fdfd1c, 0x3d9393ae, 0x4c26266a, 0x6c36365a, 
0x7e3f3f41, 0xf5f7f702, 0x83cccc4f, 0x6834345c, 0x51a5a5f4, 0xd1e5e534, 0xf9f1f108, 0xe2717193, 0xabd8d873, 0x62313153, 0x2a15153f, 0x0804040c, 0x95c7c752, 0x46232365, 0x9dc3c35e, 0x30181828, 0x379696a1, 0x0a05050f, 0x2f9a9ab5, 0x0e070709, 0x24121236, 0x1b80809b, 0xdfe2e23d, 0xcdebeb26, 0x4e272769, 0x7fb2b2cd, 0xea75759f, 0x1209091b, 0x1d83839e, 0x582c2c74, 0x341a1a2e, 0x361b1b2d, 0xdc6e6eb2, 0xb45a5aee, 0x5ba0a0fb, 0xa45252f6, 0x763b3b4d, 0xb7d6d661, 0x7db3b3ce, 0x5229297b, 0xdde3e33e, 0x5e2f2f71, 0x13848497, 0xa65353f5, 0xb9d1d168, 0x00000000, 0xc1eded2c, 0x40202060, 0xe3fcfc1f, 0x79b1b1c8, 0xb65b5bed, 0xd46a6abe, 0x8dcbcb46, 0x67bebed9, 0x7239394b, 0x944a4ade, 0x984c4cd4, 0xb05858e8, 0x85cfcf4a, 0xbbd0d06b, 0xc5efef2a, 0x4faaaae5, 0xedfbfb16, 0x864343c5, 0x9a4d4dd7, 0x66333355, 0x11858594, 0x8a4545cf, 0xe9f9f910, 0x04020206, 0xfe7f7f81, 0xa05050f0, 0x783c3c44, 0x259f9fba, 0x4ba8a8e3, 0xa25151f3, 0x5da3a3fe, 0x804040c0, 0x058f8f8a, 0x3f9292ad, 0x219d9dbc, 0x70383848, 0xf1f5f504, 0x63bcbcdf, 0x77b6b6c1, 0xafdada75, 0x42212163, 0x20101030, 0xe5ffff1a, 0xfdf3f30e, 0xbfd2d26d, 0x81cdcd4c, 0x180c0c14, 0x26131335, 0xc3ecec2f, 0xbe5f5fe1, 0x359797a2, 0x884444cc, 0x2e171739, 0x93c4c457, 0x55a7a7f2, 0xfc7e7e82, 0x7a3d3d47, 0xc86464ac, 0xba5d5de7, 0x3219192b, 0xe6737395, 0xc06060a0, 0x19818198, 0x9e4f4fd1, 0xa3dcdc7f, 0x44222266, 0x542a2a7e, 0x3b9090ab, 0x0b888883, 0x8c4646ca, 0xc7eeee29, 0x6bb8b8d3, 0x2814143c, 0xa7dede79, 0xbc5e5ee2, 0x160b0b1d, 0xaddbdb76, 0xdbe0e03b, 0x64323256, 0x743a3a4e, 0x140a0a1e, 0x924949db, 0x0c06060a, 0x4824246c, 0xb85c5ce4, 0x9fc2c25d, 0xbdd3d36e, 0x43acacef, 0xc46262a6, 0x399191a8, 0x319595a4, 0xd3e4e437, 0xf279798b, 0xd5e7e732, 0x8bc8c843, 0x6e373759, 0xda6d6db7, 0x018d8d8c, 0xb1d5d564, 0x9c4e4ed2, 0x49a9a9e0, 0xd86c6cb4, 0xac5656fa, 0xf3f4f407, 0xcfeaea25, 0xca6565af, 0xf47a7a8e, 0x47aeaee9, 0x10080818, 0x6fbabad5, 0xf0787888, 0x4a25256f, 0x5c2e2e72, 0x381c1c24, 0x57a6a6f1, 0x73b4b4c7, 0x97c6c651, 0xcbe8e823, 0xa1dddd7c, 0xe874749c, 
0x3e1f1f21, 0x964b4bdd, 0x61bdbddc, 0x0d8b8b86, 0x0f8a8a85, 0xe0707090, 0x7c3e3e42, 0x71b5b5c4, 0xcc6666aa, 0x904848d8, 0x06030305, 0xf7f6f601, 0x1c0e0e12, 0xc26161a3, 0x6a35355f, 0xae5757f9, 0x69b9b9d0, 0x17868691, 0x99c1c158, 0x3a1d1d27, 0x279e9eb9, 0xd9e1e138, 0xebf8f813, 0x2b9898b3, 0x22111133, 0xd26969bb, 0xa9d9d970, 0x078e8e89, 0x339494a7, 0x2d9b9bb6, 0x3c1e1e22, 0x15878792, 0xc9e9e920, 0x87cece49, 0xaa5555ff, 0x50282878, 0xa5dfdf7a, 0x038c8c8f, 0x59a1a1f8, 0x09898980, 0x1a0d0d17, 0x65bfbfda, 0xd7e6e631, 0x844242c6, 0xd06868b8, 0x824141c3, 0x299999b0, 0x5a2d2d77, 0x1e0f0f11, 0x7bb0b0cb, 0xa85454fc, 0x6dbbbbd6, 0x2c16163a ] - T2 = [ 0xa5c66363, 0x84f87c7c, 0x99ee7777, 0x8df67b7b, 0x0dfff2f2, 0xbdd66b6b, 0xb1de6f6f, 0x5491c5c5, 0x50603030, 0x03020101, 0xa9ce6767, 0x7d562b2b, 0x19e7fefe, 0x62b5d7d7, 0xe64dabab, 0x9aec7676, 0x458fcaca, 0x9d1f8282, 0x4089c9c9, 0x87fa7d7d, 0x15effafa, 0xebb25959, 0xc98e4747, 0x0bfbf0f0, 0xec41adad, 0x67b3d4d4, 0xfd5fa2a2, 0xea45afaf, 0xbf239c9c, 0xf753a4a4, 0x96e47272, 0x5b9bc0c0, 0xc275b7b7, 0x1ce1fdfd, 0xae3d9393, 0x6a4c2626, 0x5a6c3636, 0x417e3f3f, 0x02f5f7f7, 0x4f83cccc, 0x5c683434, 0xf451a5a5, 0x34d1e5e5, 0x08f9f1f1, 0x93e27171, 0x73abd8d8, 0x53623131, 0x3f2a1515, 0x0c080404, 0x5295c7c7, 0x65462323, 0x5e9dc3c3, 0x28301818, 0xa1379696, 0x0f0a0505, 0xb52f9a9a, 0x090e0707, 0x36241212, 0x9b1b8080, 0x3ddfe2e2, 0x26cdebeb, 0x694e2727, 0xcd7fb2b2, 0x9fea7575, 0x1b120909, 0x9e1d8383, 0x74582c2c, 0x2e341a1a, 0x2d361b1b, 0xb2dc6e6e, 0xeeb45a5a, 0xfb5ba0a0, 0xf6a45252, 0x4d763b3b, 0x61b7d6d6, 0xce7db3b3, 0x7b522929, 0x3edde3e3, 0x715e2f2f, 0x97138484, 0xf5a65353, 0x68b9d1d1, 0x00000000, 0x2cc1eded, 0x60402020, 0x1fe3fcfc, 0xc879b1b1, 0xedb65b5b, 0xbed46a6a, 0x468dcbcb, 0xd967bebe, 0x4b723939, 0xde944a4a, 0xd4984c4c, 0xe8b05858, 0x4a85cfcf, 0x6bbbd0d0, 0x2ac5efef, 0xe54faaaa, 0x16edfbfb, 0xc5864343, 0xd79a4d4d, 0x55663333, 0x94118585, 0xcf8a4545, 0x10e9f9f9, 0x06040202, 0x81fe7f7f, 0xf0a05050, 0x44783c3c, 0xba259f9f, 0xe34ba8a8, 
0xf3a25151, 0xfe5da3a3, 0xc0804040, 0x8a058f8f, 0xad3f9292, 0xbc219d9d, 0x48703838, 0x04f1f5f5, 0xdf63bcbc, 0xc177b6b6, 0x75afdada, 0x63422121, 0x30201010, 0x1ae5ffff, 0x0efdf3f3, 0x6dbfd2d2, 0x4c81cdcd, 0x14180c0c, 0x35261313, 0x2fc3ecec, 0xe1be5f5f, 0xa2359797, 0xcc884444, 0x392e1717, 0x5793c4c4, 0xf255a7a7, 0x82fc7e7e, 0x477a3d3d, 0xacc86464, 0xe7ba5d5d, 0x2b321919, 0x95e67373, 0xa0c06060, 0x98198181, 0xd19e4f4f, 0x7fa3dcdc, 0x66442222, 0x7e542a2a, 0xab3b9090, 0x830b8888, 0xca8c4646, 0x29c7eeee, 0xd36bb8b8, 0x3c281414, 0x79a7dede, 0xe2bc5e5e, 0x1d160b0b, 0x76addbdb, 0x3bdbe0e0, 0x56643232, 0x4e743a3a, 0x1e140a0a, 0xdb924949, 0x0a0c0606, 0x6c482424, 0xe4b85c5c, 0x5d9fc2c2, 0x6ebdd3d3, 0xef43acac, 0xa6c46262, 0xa8399191, 0xa4319595, 0x37d3e4e4, 0x8bf27979, 0x32d5e7e7, 0x438bc8c8, 0x596e3737, 0xb7da6d6d, 0x8c018d8d, 0x64b1d5d5, 0xd29c4e4e, 0xe049a9a9, 0xb4d86c6c, 0xfaac5656, 0x07f3f4f4, 0x25cfeaea, 0xafca6565, 0x8ef47a7a, 0xe947aeae, 0x18100808, 0xd56fbaba, 0x88f07878, 0x6f4a2525, 0x725c2e2e, 0x24381c1c, 0xf157a6a6, 0xc773b4b4, 0x5197c6c6, 0x23cbe8e8, 0x7ca1dddd, 0x9ce87474, 0x213e1f1f, 0xdd964b4b, 0xdc61bdbd, 0x860d8b8b, 0x850f8a8a, 0x90e07070, 0x427c3e3e, 0xc471b5b5, 0xaacc6666, 0xd8904848, 0x05060303, 0x01f7f6f6, 0x121c0e0e, 0xa3c26161, 0x5f6a3535, 0xf9ae5757, 0xd069b9b9, 0x91178686, 0x5899c1c1, 0x273a1d1d, 0xb9279e9e, 0x38d9e1e1, 0x13ebf8f8, 0xb32b9898, 0x33221111, 0xbbd26969, 0x70a9d9d9, 0x89078e8e, 0xa7339494, 0xb62d9b9b, 0x223c1e1e, 0x92158787, 0x20c9e9e9, 0x4987cece, 0xffaa5555, 0x78502828, 0x7aa5dfdf, 0x8f038c8c, 0xf859a1a1, 0x80098989, 0x171a0d0d, 0xda65bfbf, 0x31d7e6e6, 0xc6844242, 0xb8d06868, 0xc3824141, 0xb0299999, 0x775a2d2d, 0x111e0f0f, 0xcb7bb0b0, 0xfca85454, 0xd66dbbbb, 0x3a2c1616 ] - T3 = [ 0x63a5c663, 0x7c84f87c, 0x7799ee77, 0x7b8df67b, 0xf20dfff2, 0x6bbdd66b, 0x6fb1de6f, 0xc55491c5, 0x30506030, 0x01030201, 0x67a9ce67, 0x2b7d562b, 0xfe19e7fe, 0xd762b5d7, 0xabe64dab, 0x769aec76, 0xca458fca, 0x829d1f82, 0xc94089c9, 0x7d87fa7d, 0xfa15effa, 
0x59ebb259, 0x47c98e47, 0xf00bfbf0, 0xadec41ad, 0xd467b3d4, 0xa2fd5fa2, 0xafea45af, 0x9cbf239c, 0xa4f753a4, 0x7296e472, 0xc05b9bc0, 0xb7c275b7, 0xfd1ce1fd, 0x93ae3d93, 0x266a4c26, 0x365a6c36, 0x3f417e3f, 0xf702f5f7, 0xcc4f83cc, 0x345c6834, 0xa5f451a5, 0xe534d1e5, 0xf108f9f1, 0x7193e271, 0xd873abd8, 0x31536231, 0x153f2a15, 0x040c0804, 0xc75295c7, 0x23654623, 0xc35e9dc3, 0x18283018, 0x96a13796, 0x050f0a05, 0x9ab52f9a, 0x07090e07, 0x12362412, 0x809b1b80, 0xe23ddfe2, 0xeb26cdeb, 0x27694e27, 0xb2cd7fb2, 0x759fea75, 0x091b1209, 0x839e1d83, 0x2c74582c, 0x1a2e341a, 0x1b2d361b, 0x6eb2dc6e, 0x5aeeb45a, 0xa0fb5ba0, 0x52f6a452, 0x3b4d763b, 0xd661b7d6, 0xb3ce7db3, 0x297b5229, 0xe33edde3, 0x2f715e2f, 0x84971384, 0x53f5a653, 0xd168b9d1, 0x00000000, 0xed2cc1ed, 0x20604020, 0xfc1fe3fc, 0xb1c879b1, 0x5bedb65b, 0x6abed46a, 0xcb468dcb, 0xbed967be, 0x394b7239, 0x4ade944a, 0x4cd4984c, 0x58e8b058, 0xcf4a85cf, 0xd06bbbd0, 0xef2ac5ef, 0xaae54faa, 0xfb16edfb, 0x43c58643, 0x4dd79a4d, 0x33556633, 0x85941185, 0x45cf8a45, 0xf910e9f9, 0x02060402, 0x7f81fe7f, 0x50f0a050, 0x3c44783c, 0x9fba259f, 0xa8e34ba8, 0x51f3a251, 0xa3fe5da3, 0x40c08040, 0x8f8a058f, 0x92ad3f92, 0x9dbc219d, 0x38487038, 0xf504f1f5, 0xbcdf63bc, 0xb6c177b6, 0xda75afda, 0x21634221, 0x10302010, 0xff1ae5ff, 0xf30efdf3, 0xd26dbfd2, 0xcd4c81cd, 0x0c14180c, 0x13352613, 0xec2fc3ec, 0x5fe1be5f, 0x97a23597, 0x44cc8844, 0x17392e17, 0xc45793c4, 0xa7f255a7, 0x7e82fc7e, 0x3d477a3d, 0x64acc864, 0x5de7ba5d, 0x192b3219, 0x7395e673, 0x60a0c060, 0x81981981, 0x4fd19e4f, 0xdc7fa3dc, 0x22664422, 0x2a7e542a, 0x90ab3b90, 0x88830b88, 0x46ca8c46, 0xee29c7ee, 0xb8d36bb8, 0x143c2814, 0xde79a7de, 0x5ee2bc5e, 0x0b1d160b, 0xdb76addb, 0xe03bdbe0, 0x32566432, 0x3a4e743a, 0x0a1e140a, 0x49db9249, 0x060a0c06, 0x246c4824, 0x5ce4b85c, 0xc25d9fc2, 0xd36ebdd3, 0xacef43ac, 0x62a6c462, 0x91a83991, 0x95a43195, 0xe437d3e4, 0x798bf279, 0xe732d5e7, 0xc8438bc8, 0x37596e37, 0x6db7da6d, 0x8d8c018d, 0xd564b1d5, 0x4ed29c4e, 0xa9e049a9, 0x6cb4d86c, 0x56faac56, 0xf407f3f4, 
0xea25cfea, 0x65afca65, 0x7a8ef47a, 0xaee947ae, 0x08181008, 0xbad56fba, 0x7888f078, 0x256f4a25, 0x2e725c2e, 0x1c24381c, 0xa6f157a6, 0xb4c773b4, 0xc65197c6, 0xe823cbe8, 0xdd7ca1dd, 0x749ce874, 0x1f213e1f, 0x4bdd964b, 0xbddc61bd, 0x8b860d8b, 0x8a850f8a, 0x7090e070, 0x3e427c3e, 0xb5c471b5, 0x66aacc66, 0x48d89048, 0x03050603, 0xf601f7f6, 0x0e121c0e, 0x61a3c261, 0x355f6a35, 0x57f9ae57, 0xb9d069b9, 0x86911786, 0xc15899c1, 0x1d273a1d, 0x9eb9279e, 0xe138d9e1, 0xf813ebf8, 0x98b32b98, 0x11332211, 0x69bbd269, 0xd970a9d9, 0x8e89078e, 0x94a73394, 0x9bb62d9b, 0x1e223c1e, 0x87921587, 0xe920c9e9, 0xce4987ce, 0x55ffaa55, 0x28785028, 0xdf7aa5df, 0x8c8f038c, 0xa1f859a1, 0x89800989, 0x0d171a0d, 0xbfda65bf, 0xe631d7e6, 0x42c68442, 0x68b8d068, 0x41c38241, 0x99b02999, 0x2d775a2d, 0x0f111e0f, 0xb0cb7bb0, 0x54fca854, 0xbbd66dbb, 0x163a2c16 ] - T4 = [ 0x6363a5c6, 0x7c7c84f8, 0x777799ee, 0x7b7b8df6, 0xf2f20dff, 0x6b6bbdd6, 0x6f6fb1de, 0xc5c55491, 0x30305060, 0x01010302, 0x6767a9ce, 0x2b2b7d56, 0xfefe19e7, 0xd7d762b5, 0xababe64d, 0x76769aec, 0xcaca458f, 0x82829d1f, 0xc9c94089, 0x7d7d87fa, 0xfafa15ef, 0x5959ebb2, 0x4747c98e, 0xf0f00bfb, 0xadadec41, 0xd4d467b3, 0xa2a2fd5f, 0xafafea45, 0x9c9cbf23, 0xa4a4f753, 0x727296e4, 0xc0c05b9b, 0xb7b7c275, 0xfdfd1ce1, 0x9393ae3d, 0x26266a4c, 0x36365a6c, 0x3f3f417e, 0xf7f702f5, 0xcccc4f83, 0x34345c68, 0xa5a5f451, 0xe5e534d1, 0xf1f108f9, 0x717193e2, 0xd8d873ab, 0x31315362, 0x15153f2a, 0x04040c08, 0xc7c75295, 0x23236546, 0xc3c35e9d, 0x18182830, 0x9696a137, 0x05050f0a, 0x9a9ab52f, 0x0707090e, 0x12123624, 0x80809b1b, 0xe2e23ddf, 0xebeb26cd, 0x2727694e, 0xb2b2cd7f, 0x75759fea, 0x09091b12, 0x83839e1d, 0x2c2c7458, 0x1a1a2e34, 0x1b1b2d36, 0x6e6eb2dc, 0x5a5aeeb4, 0xa0a0fb5b, 0x5252f6a4, 0x3b3b4d76, 0xd6d661b7, 0xb3b3ce7d, 0x29297b52, 0xe3e33edd, 0x2f2f715e, 0x84849713, 0x5353f5a6, 0xd1d168b9, 0x00000000, 0xeded2cc1, 0x20206040, 0xfcfc1fe3, 0xb1b1c879, 0x5b5bedb6, 0x6a6abed4, 0xcbcb468d, 0xbebed967, 0x39394b72, 0x4a4ade94, 0x4c4cd498, 0x5858e8b0, 0xcfcf4a85, 
0xd0d06bbb, 0xefef2ac5, 0xaaaae54f, 0xfbfb16ed, 0x4343c586, 0x4d4dd79a, 0x33335566, 0x85859411, 0x4545cf8a, 0xf9f910e9, 0x02020604, 0x7f7f81fe, 0x5050f0a0, 0x3c3c4478, 0x9f9fba25, 0xa8a8e34b, 0x5151f3a2, 0xa3a3fe5d, 0x4040c080, 0x8f8f8a05, 0x9292ad3f, 0x9d9dbc21, 0x38384870, 0xf5f504f1, 0xbcbcdf63, 0xb6b6c177, 0xdada75af, 0x21216342, 0x10103020, 0xffff1ae5, 0xf3f30efd, 0xd2d26dbf, 0xcdcd4c81, 0x0c0c1418, 0x13133526, 0xecec2fc3, 0x5f5fe1be, 0x9797a235, 0x4444cc88, 0x1717392e, 0xc4c45793, 0xa7a7f255, 0x7e7e82fc, 0x3d3d477a, 0x6464acc8, 0x5d5de7ba, 0x19192b32, 0x737395e6, 0x6060a0c0, 0x81819819, 0x4f4fd19e, 0xdcdc7fa3, 0x22226644, 0x2a2a7e54, 0x9090ab3b, 0x8888830b, 0x4646ca8c, 0xeeee29c7, 0xb8b8d36b, 0x14143c28, 0xdede79a7, 0x5e5ee2bc, 0x0b0b1d16, 0xdbdb76ad, 0xe0e03bdb, 0x32325664, 0x3a3a4e74, 0x0a0a1e14, 0x4949db92, 0x06060a0c, 0x24246c48, 0x5c5ce4b8, 0xc2c25d9f, 0xd3d36ebd, 0xacacef43, 0x6262a6c4, 0x9191a839, 0x9595a431, 0xe4e437d3, 0x79798bf2, 0xe7e732d5, 0xc8c8438b, 0x3737596e, 0x6d6db7da, 0x8d8d8c01, 0xd5d564b1, 0x4e4ed29c, 0xa9a9e049, 0x6c6cb4d8, 0x5656faac, 0xf4f407f3, 0xeaea25cf, 0x6565afca, 0x7a7a8ef4, 0xaeaee947, 0x08081810, 0xbabad56f, 0x787888f0, 0x25256f4a, 0x2e2e725c, 0x1c1c2438, 0xa6a6f157, 0xb4b4c773, 0xc6c65197, 0xe8e823cb, 0xdddd7ca1, 0x74749ce8, 0x1f1f213e, 0x4b4bdd96, 0xbdbddc61, 0x8b8b860d, 0x8a8a850f, 0x707090e0, 0x3e3e427c, 0xb5b5c471, 0x6666aacc, 0x4848d890, 0x03030506, 0xf6f601f7, 0x0e0e121c, 0x6161a3c2, 0x35355f6a, 0x5757f9ae, 0xb9b9d069, 0x86869117, 0xc1c15899, 0x1d1d273a, 0x9e9eb927, 0xe1e138d9, 0xf8f813eb, 0x9898b32b, 0x11113322, 0x6969bbd2, 0xd9d970a9, 0x8e8e8907, 0x9494a733, 0x9b9bb62d, 0x1e1e223c, 0x87879215, 0xe9e920c9, 0xcece4987, 0x5555ffaa, 0x28287850, 0xdfdf7aa5, 0x8c8c8f03, 0xa1a1f859, 0x89898009, 0x0d0d171a, 0xbfbfda65, 0xe6e631d7, 0x4242c684, 0x6868b8d0, 0x4141c382, 0x9999b029, 0x2d2d775a, 0x0f0f111e, 0xb0b0cb7b, 0x5454fca8, 0xbbbbd66d, 0x16163a2c ] - - # Transformations for decryption - T5 = [ 0x51f4a750, 0x7e416553, 
0x1a17a4c3, 0x3a275e96, 0x3bab6bcb, 0x1f9d45f1, 0xacfa58ab, 0x4be30393, 0x2030fa55, 0xad766df6, 0x88cc7691, 0xf5024c25, 0x4fe5d7fc, 0xc52acbd7, 0x26354480, 0xb562a38f, 0xdeb15a49, 0x25ba1b67, 0x45ea0e98, 0x5dfec0e1, 0xc32f7502, 0x814cf012, 0x8d4697a3, 0x6bd3f9c6, 0x038f5fe7, 0x15929c95, 0xbf6d7aeb, 0x955259da, 0xd4be832d, 0x587421d3, 0x49e06929, 0x8ec9c844, 0x75c2896a, 0xf48e7978, 0x99583e6b, 0x27b971dd, 0xbee14fb6, 0xf088ad17, 0xc920ac66, 0x7dce3ab4, 0x63df4a18, 0xe51a3182, 0x97513360, 0x62537f45, 0xb16477e0, 0xbb6bae84, 0xfe81a01c, 0xf9082b94, 0x70486858, 0x8f45fd19, 0x94de6c87, 0x527bf8b7, 0xab73d323, 0x724b02e2, 0xe31f8f57, 0x6655ab2a, 0xb2eb2807, 0x2fb5c203, 0x86c57b9a, 0xd33708a5, 0x302887f2, 0x23bfa5b2, 0x02036aba, 0xed16825c, 0x8acf1c2b, 0xa779b492, 0xf307f2f0, 0x4e69e2a1, 0x65daf4cd, 0x0605bed5, 0xd134621f, 0xc4a6fe8a, 0x342e539d, 0xa2f355a0, 0x058ae132, 0xa4f6eb75, 0x0b83ec39, 0x4060efaa, 0x5e719f06, 0xbd6e1051, 0x3e218af9, 0x96dd063d, 0xdd3e05ae, 0x4de6bd46, 0x91548db5, 0x71c45d05, 0x0406d46f, 0x605015ff, 0x1998fb24, 0xd6bde997, 0x894043cc, 0x67d99e77, 0xb0e842bd, 0x07898b88, 0xe7195b38, 0x79c8eedb, 0xa17c0a47, 0x7c420fe9, 0xf8841ec9, 0x00000000, 0x09808683, 0x322bed48, 0x1e1170ac, 0x6c5a724e, 0xfd0efffb, 0x0f853856, 0x3daed51e, 0x362d3927, 0x0a0fd964, 0x685ca621, 0x9b5b54d1, 0x24362e3a, 0x0c0a67b1, 0x9357e70f, 0xb4ee96d2, 0x1b9b919e, 0x80c0c54f, 0x61dc20a2, 0x5a774b69, 0x1c121a16, 0xe293ba0a, 0xc0a02ae5, 0x3c22e043, 0x121b171d, 0x0e090d0b, 0xf28bc7ad, 0x2db6a8b9, 0x141ea9c8, 0x57f11985, 0xaf75074c, 0xee99ddbb, 0xa37f60fd, 0xf701269f, 0x5c72f5bc, 0x44663bc5, 0x5bfb7e34, 0x8b432976, 0xcb23c6dc, 0xb6edfc68, 0xb8e4f163, 0xd731dcca, 0x42638510, 0x13972240, 0x84c61120, 0x854a247d, 0xd2bb3df8, 0xaef93211, 0xc729a16d, 0x1d9e2f4b, 0xdcb230f3, 0x0d8652ec, 0x77c1e3d0, 0x2bb3166c, 0xa970b999, 0x119448fa, 0x47e96422, 0xa8fc8cc4, 0xa0f03f1a, 0x567d2cd8, 0x223390ef, 0x87494ec7, 0xd938d1c1, 0x8ccaa2fe, 0x98d40b36, 0xa6f581cf, 0xa57ade28, 0xdab78e26, 0x3fadbfa4, 
0x2c3a9de4, 0x5078920d, 0x6a5fcc9b, 0x547e4662, 0xf68d13c2, 0x90d8b8e8, 0x2e39f75e, 0x82c3aff5, 0x9f5d80be, 0x69d0937c, 0x6fd52da9, 0xcf2512b3, 0xc8ac993b, 0x10187da7, 0xe89c636e, 0xdb3bbb7b, 0xcd267809, 0x6e5918f4, 0xec9ab701, 0x834f9aa8, 0xe6956e65, 0xaaffe67e, 0x21bccf08, 0xef15e8e6, 0xbae79bd9, 0x4a6f36ce, 0xea9f09d4, 0x29b07cd6, 0x31a4b2af, 0x2a3f2331, 0xc6a59430, 0x35a266c0, 0x744ebc37, 0xfc82caa6, 0xe090d0b0, 0x33a7d815, 0xf104984a, 0x41ecdaf7, 0x7fcd500e, 0x1791f62f, 0x764dd68d, 0x43efb04d, 0xccaa4d54, 0xe49604df, 0x9ed1b5e3, 0x4c6a881b, 0xc12c1fb8, 0x4665517f, 0x9d5eea04, 0x018c355d, 0xfa877473, 0xfb0b412e, 0xb3671d5a, 0x92dbd252, 0xe9105633, 0x6dd64713, 0x9ad7618c, 0x37a10c7a, 0x59f8148e, 0xeb133c89, 0xcea927ee, 0xb761c935, 0xe11ce5ed, 0x7a47b13c, 0x9cd2df59, 0x55f2733f, 0x1814ce79, 0x73c737bf, 0x53f7cdea, 0x5ffdaa5b, 0xdf3d6f14, 0x7844db86, 0xcaaff381, 0xb968c43e, 0x3824342c, 0xc2a3405f, 0x161dc372, 0xbce2250c, 0x283c498b, 0xff0d9541, 0x39a80171, 0x080cb3de, 0xd8b4e49c, 0x6456c190, 0x7bcb8461, 0xd532b670, 0x486c5c74, 0xd0b85742 ] - T6 = [ 0x5051f4a7, 0x537e4165, 0xc31a17a4, 0x963a275e, 0xcb3bab6b, 0xf11f9d45, 0xabacfa58, 0x934be303, 0x552030fa, 0xf6ad766d, 0x9188cc76, 0x25f5024c, 0xfc4fe5d7, 0xd7c52acb, 0x80263544, 0x8fb562a3, 0x49deb15a, 0x6725ba1b, 0x9845ea0e, 0xe15dfec0, 0x02c32f75, 0x12814cf0, 0xa38d4697, 0xc66bd3f9, 0xe7038f5f, 0x9515929c, 0xebbf6d7a, 0xda955259, 0x2dd4be83, 0xd3587421, 0x2949e069, 0x448ec9c8, 0x6a75c289, 0x78f48e79, 0x6b99583e, 0xdd27b971, 0xb6bee14f, 0x17f088ad, 0x66c920ac, 0xb47dce3a, 0x1863df4a, 0x82e51a31, 0x60975133, 0x4562537f, 0xe0b16477, 0x84bb6bae, 0x1cfe81a0, 0x94f9082b, 0x58704868, 0x198f45fd, 0x8794de6c, 0xb7527bf8, 0x23ab73d3, 0xe2724b02, 0x57e31f8f, 0x2a6655ab, 0x07b2eb28, 0x032fb5c2, 0x9a86c57b, 0xa5d33708, 0xf2302887, 0xb223bfa5, 0xba02036a, 0x5ced1682, 0x2b8acf1c, 0x92a779b4, 0xf0f307f2, 0xa14e69e2, 0xcd65daf4, 0xd50605be, 0x1fd13462, 0x8ac4a6fe, 0x9d342e53, 0xa0a2f355, 0x32058ae1, 0x75a4f6eb, 0x390b83ec, 
0xaa4060ef, 0x065e719f, 0x51bd6e10, 0xf93e218a, 0x3d96dd06, 0xaedd3e05, 0x464de6bd, 0xb591548d, 0x0571c45d, 0x6f0406d4, 0xff605015, 0x241998fb, 0x97d6bde9, 0xcc894043, 0x7767d99e, 0xbdb0e842, 0x8807898b, 0x38e7195b, 0xdb79c8ee, 0x47a17c0a, 0xe97c420f, 0xc9f8841e, 0x00000000, 0x83098086, 0x48322bed, 0xac1e1170, 0x4e6c5a72, 0xfbfd0eff, 0x560f8538, 0x1e3daed5, 0x27362d39, 0x640a0fd9, 0x21685ca6, 0xd19b5b54, 0x3a24362e, 0xb10c0a67, 0x0f9357e7, 0xd2b4ee96, 0x9e1b9b91, 0x4f80c0c5, 0xa261dc20, 0x695a774b, 0x161c121a, 0x0ae293ba, 0xe5c0a02a, 0x433c22e0, 0x1d121b17, 0x0b0e090d, 0xadf28bc7, 0xb92db6a8, 0xc8141ea9, 0x8557f119, 0x4caf7507, 0xbbee99dd, 0xfda37f60, 0x9ff70126, 0xbc5c72f5, 0xc544663b, 0x345bfb7e, 0x768b4329, 0xdccb23c6, 0x68b6edfc, 0x63b8e4f1, 0xcad731dc, 0x10426385, 0x40139722, 0x2084c611, 0x7d854a24, 0xf8d2bb3d, 0x11aef932, 0x6dc729a1, 0x4b1d9e2f, 0xf3dcb230, 0xec0d8652, 0xd077c1e3, 0x6c2bb316, 0x99a970b9, 0xfa119448, 0x2247e964, 0xc4a8fc8c, 0x1aa0f03f, 0xd8567d2c, 0xef223390, 0xc787494e, 0xc1d938d1, 0xfe8ccaa2, 0x3698d40b, 0xcfa6f581, 0x28a57ade, 0x26dab78e, 0xa43fadbf, 0xe42c3a9d, 0x0d507892, 0x9b6a5fcc, 0x62547e46, 0xc2f68d13, 0xe890d8b8, 0x5e2e39f7, 0xf582c3af, 0xbe9f5d80, 0x7c69d093, 0xa96fd52d, 0xb3cf2512, 0x3bc8ac99, 0xa710187d, 0x6ee89c63, 0x7bdb3bbb, 0x09cd2678, 0xf46e5918, 0x01ec9ab7, 0xa8834f9a, 0x65e6956e, 0x7eaaffe6, 0x0821bccf, 0xe6ef15e8, 0xd9bae79b, 0xce4a6f36, 0xd4ea9f09, 0xd629b07c, 0xaf31a4b2, 0x312a3f23, 0x30c6a594, 0xc035a266, 0x37744ebc, 0xa6fc82ca, 0xb0e090d0, 0x1533a7d8, 0x4af10498, 0xf741ecda, 0x0e7fcd50, 0x2f1791f6, 0x8d764dd6, 0x4d43efb0, 0x54ccaa4d, 0xdfe49604, 0xe39ed1b5, 0x1b4c6a88, 0xb8c12c1f, 0x7f466551, 0x049d5eea, 0x5d018c35, 0x73fa8774, 0x2efb0b41, 0x5ab3671d, 0x5292dbd2, 0x33e91056, 0x136dd647, 0x8c9ad761, 0x7a37a10c, 0x8e59f814, 0x89eb133c, 0xeecea927, 0x35b761c9, 0xede11ce5, 0x3c7a47b1, 0x599cd2df, 0x3f55f273, 0x791814ce, 0xbf73c737, 0xea53f7cd, 0x5b5ffdaa, 0x14df3d6f, 0x867844db, 0x81caaff3, 0x3eb968c4, 0x2c382434, 
0x5fc2a340, 0x72161dc3, 0x0cbce225, 0x8b283c49, 0x41ff0d95, 0x7139a801, 0xde080cb3, 0x9cd8b4e4, 0x906456c1, 0x617bcb84, 0x70d532b6, 0x74486c5c, 0x42d0b857 ] - T7 = [ 0xa75051f4, 0x65537e41, 0xa4c31a17, 0x5e963a27, 0x6bcb3bab, 0x45f11f9d, 0x58abacfa, 0x03934be3, 0xfa552030, 0x6df6ad76, 0x769188cc, 0x4c25f502, 0xd7fc4fe5, 0xcbd7c52a, 0x44802635, 0xa38fb562, 0x5a49deb1, 0x1b6725ba, 0x0e9845ea, 0xc0e15dfe, 0x7502c32f, 0xf012814c, 0x97a38d46, 0xf9c66bd3, 0x5fe7038f, 0x9c951592, 0x7aebbf6d, 0x59da9552, 0x832dd4be, 0x21d35874, 0x692949e0, 0xc8448ec9, 0x896a75c2, 0x7978f48e, 0x3e6b9958, 0x71dd27b9, 0x4fb6bee1, 0xad17f088, 0xac66c920, 0x3ab47dce, 0x4a1863df, 0x3182e51a, 0x33609751, 0x7f456253, 0x77e0b164, 0xae84bb6b, 0xa01cfe81, 0x2b94f908, 0x68587048, 0xfd198f45, 0x6c8794de, 0xf8b7527b, 0xd323ab73, 0x02e2724b, 0x8f57e31f, 0xab2a6655, 0x2807b2eb, 0xc2032fb5, 0x7b9a86c5, 0x08a5d337, 0x87f23028, 0xa5b223bf, 0x6aba0203, 0x825ced16, 0x1c2b8acf, 0xb492a779, 0xf2f0f307, 0xe2a14e69, 0xf4cd65da, 0xbed50605, 0x621fd134, 0xfe8ac4a6, 0x539d342e, 0x55a0a2f3, 0xe132058a, 0xeb75a4f6, 0xec390b83, 0xefaa4060, 0x9f065e71, 0x1051bd6e, 0x8af93e21, 0x063d96dd, 0x05aedd3e, 0xbd464de6, 0x8db59154, 0x5d0571c4, 0xd46f0406, 0x15ff6050, 0xfb241998, 0xe997d6bd, 0x43cc8940, 0x9e7767d9, 0x42bdb0e8, 0x8b880789, 0x5b38e719, 0xeedb79c8, 0x0a47a17c, 0x0fe97c42, 0x1ec9f884, 0x00000000, 0x86830980, 0xed48322b, 0x70ac1e11, 0x724e6c5a, 0xfffbfd0e, 0x38560f85, 0xd51e3dae, 0x3927362d, 0xd9640a0f, 0xa621685c, 0x54d19b5b, 0x2e3a2436, 0x67b10c0a, 0xe70f9357, 0x96d2b4ee, 0x919e1b9b, 0xc54f80c0, 0x20a261dc, 0x4b695a77, 0x1a161c12, 0xba0ae293, 0x2ae5c0a0, 0xe0433c22, 0x171d121b, 0x0d0b0e09, 0xc7adf28b, 0xa8b92db6, 0xa9c8141e, 0x198557f1, 0x074caf75, 0xddbbee99, 0x60fda37f, 0x269ff701, 0xf5bc5c72, 0x3bc54466, 0x7e345bfb, 0x29768b43, 0xc6dccb23, 0xfc68b6ed, 0xf163b8e4, 0xdccad731, 0x85104263, 0x22401397, 0x112084c6, 0x247d854a, 0x3df8d2bb, 0x3211aef9, 0xa16dc729, 0x2f4b1d9e, 0x30f3dcb2, 0x52ec0d86, 0xe3d077c1, 
0x166c2bb3, 0xb999a970, 0x48fa1194, 0x642247e9, 0x8cc4a8fc, 0x3f1aa0f0, 0x2cd8567d, 0x90ef2233, 0x4ec78749, 0xd1c1d938, 0xa2fe8cca, 0x0b3698d4, 0x81cfa6f5, 0xde28a57a, 0x8e26dab7, 0xbfa43fad, 0x9de42c3a, 0x920d5078, 0xcc9b6a5f, 0x4662547e, 0x13c2f68d, 0xb8e890d8, 0xf75e2e39, 0xaff582c3, 0x80be9f5d, 0x937c69d0, 0x2da96fd5, 0x12b3cf25, 0x993bc8ac, 0x7da71018, 0x636ee89c, 0xbb7bdb3b, 0x7809cd26, 0x18f46e59, 0xb701ec9a, 0x9aa8834f, 0x6e65e695, 0xe67eaaff, 0xcf0821bc, 0xe8e6ef15, 0x9bd9bae7, 0x36ce4a6f, 0x09d4ea9f, 0x7cd629b0, 0xb2af31a4, 0x23312a3f, 0x9430c6a5, 0x66c035a2, 0xbc37744e, 0xcaa6fc82, 0xd0b0e090, 0xd81533a7, 0x984af104, 0xdaf741ec, 0x500e7fcd, 0xf62f1791, 0xd68d764d, 0xb04d43ef, 0x4d54ccaa, 0x04dfe496, 0xb5e39ed1, 0x881b4c6a, 0x1fb8c12c, 0x517f4665, 0xea049d5e, 0x355d018c, 0x7473fa87, 0x412efb0b, 0x1d5ab367, 0xd25292db, 0x5633e910, 0x47136dd6, 0x618c9ad7, 0x0c7a37a1, 0x148e59f8, 0x3c89eb13, 0x27eecea9, 0xc935b761, 0xe5ede11c, 0xb13c7a47, 0xdf599cd2, 0x733f55f2, 0xce791814, 0x37bf73c7, 0xcdea53f7, 0xaa5b5ffd, 0x6f14df3d, 0xdb867844, 0xf381caaf, 0xc43eb968, 0x342c3824, 0x405fc2a3, 0xc372161d, 0x250cbce2, 0x498b283c, 0x9541ff0d, 0x017139a8, 0xb3de080c, 0xe49cd8b4, 0xc1906456, 0x84617bcb, 0xb670d532, 0x5c74486c, 0x5742d0b8 ] - T8 = [ 0xf4a75051, 0x4165537e, 0x17a4c31a, 0x275e963a, 0xab6bcb3b, 0x9d45f11f, 0xfa58abac, 0xe303934b, 0x30fa5520, 0x766df6ad, 0xcc769188, 0x024c25f5, 0xe5d7fc4f, 0x2acbd7c5, 0x35448026, 0x62a38fb5, 0xb15a49de, 0xba1b6725, 0xea0e9845, 0xfec0e15d, 0x2f7502c3, 0x4cf01281, 0x4697a38d, 0xd3f9c66b, 0x8f5fe703, 0x929c9515, 0x6d7aebbf, 0x5259da95, 0xbe832dd4, 0x7421d358, 0xe0692949, 0xc9c8448e, 0xc2896a75, 0x8e7978f4, 0x583e6b99, 0xb971dd27, 0xe14fb6be, 0x88ad17f0, 0x20ac66c9, 0xce3ab47d, 0xdf4a1863, 0x1a3182e5, 0x51336097, 0x537f4562, 0x6477e0b1, 0x6bae84bb, 0x81a01cfe, 0x082b94f9, 0x48685870, 0x45fd198f, 0xde6c8794, 0x7bf8b752, 0x73d323ab, 0x4b02e272, 0x1f8f57e3, 0x55ab2a66, 0xeb2807b2, 0xb5c2032f, 0xc57b9a86, 0x3708a5d3, 0x2887f230, 
0xbfa5b223, 0x036aba02, 0x16825ced, 0xcf1c2b8a, 0x79b492a7, 0x07f2f0f3, 0x69e2a14e, 0xdaf4cd65, 0x05bed506, 0x34621fd1, 0xa6fe8ac4, 0x2e539d34, 0xf355a0a2, 0x8ae13205, 0xf6eb75a4, 0x83ec390b, 0x60efaa40, 0x719f065e, 0x6e1051bd, 0x218af93e, 0xdd063d96, 0x3e05aedd, 0xe6bd464d, 0x548db591, 0xc45d0571, 0x06d46f04, 0x5015ff60, 0x98fb2419, 0xbde997d6, 0x4043cc89, 0xd99e7767, 0xe842bdb0, 0x898b8807, 0x195b38e7, 0xc8eedb79, 0x7c0a47a1, 0x420fe97c, 0x841ec9f8, 0x00000000, 0x80868309, 0x2bed4832, 0x1170ac1e, 0x5a724e6c, 0x0efffbfd, 0x8538560f, 0xaed51e3d, 0x2d392736, 0x0fd9640a, 0x5ca62168, 0x5b54d19b, 0x362e3a24, 0x0a67b10c, 0x57e70f93, 0xee96d2b4, 0x9b919e1b, 0xc0c54f80, 0xdc20a261, 0x774b695a, 0x121a161c, 0x93ba0ae2, 0xa02ae5c0, 0x22e0433c, 0x1b171d12, 0x090d0b0e, 0x8bc7adf2, 0xb6a8b92d, 0x1ea9c814, 0xf1198557, 0x75074caf, 0x99ddbbee, 0x7f60fda3, 0x01269ff7, 0x72f5bc5c, 0x663bc544, 0xfb7e345b, 0x4329768b, 0x23c6dccb, 0xedfc68b6, 0xe4f163b8, 0x31dccad7, 0x63851042, 0x97224013, 0xc6112084, 0x4a247d85, 0xbb3df8d2, 0xf93211ae, 0x29a16dc7, 0x9e2f4b1d, 0xb230f3dc, 0x8652ec0d, 0xc1e3d077, 0xb3166c2b, 0x70b999a9, 0x9448fa11, 0xe9642247, 0xfc8cc4a8, 0xf03f1aa0, 0x7d2cd856, 0x3390ef22, 0x494ec787, 0x38d1c1d9, 0xcaa2fe8c, 0xd40b3698, 0xf581cfa6, 0x7ade28a5, 0xb78e26da, 0xadbfa43f, 0x3a9de42c, 0x78920d50, 0x5fcc9b6a, 0x7e466254, 0x8d13c2f6, 0xd8b8e890, 0x39f75e2e, 0xc3aff582, 0x5d80be9f, 0xd0937c69, 0xd52da96f, 0x2512b3cf, 0xac993bc8, 0x187da710, 0x9c636ee8, 0x3bbb7bdb, 0x267809cd, 0x5918f46e, 0x9ab701ec, 0x4f9aa883, 0x956e65e6, 0xffe67eaa, 0xbccf0821, 0x15e8e6ef, 0xe79bd9ba, 0x6f36ce4a, 0x9f09d4ea, 0xb07cd629, 0xa4b2af31, 0x3f23312a, 0xa59430c6, 0xa266c035, 0x4ebc3774, 0x82caa6fc, 0x90d0b0e0, 0xa7d81533, 0x04984af1, 0xecdaf741, 0xcd500e7f, 0x91f62f17, 0x4dd68d76, 0xefb04d43, 0xaa4d54cc, 0x9604dfe4, 0xd1b5e39e, 0x6a881b4c, 0x2c1fb8c1, 0x65517f46, 0x5eea049d, 0x8c355d01, 0x877473fa, 0x0b412efb, 0x671d5ab3, 0xdbd25292, 0x105633e9, 0xd647136d, 0xd7618c9a, 0xa10c7a37, 0xf8148e59, 
0x133c89eb, 0xa927eece, 0x61c935b7, 0x1ce5ede1, 0x47b13c7a, 0xd2df599c, 0xf2733f55, 0x14ce7918, 0xc737bf73, 0xf7cdea53, 0xfdaa5b5f, 0x3d6f14df, 0x44db8678, 0xaff381ca, 0x68c43eb9, 0x24342c38, 0xa3405fc2, 0x1dc37216, 0xe2250cbc, 0x3c498b28, 0x0d9541ff, 0xa8017139, 0x0cb3de08, 0xb4e49cd8, 0x56c19064, 0xcb84617b, 0x32b670d5, 0x6c5c7448, 0xb85742d0 ] - - # Transformations for decryption key expansion - U1 = [ 0x00000000, 0x0e090d0b, 0x1c121a16, 0x121b171d, 0x3824342c, 0x362d3927, 0x24362e3a, 0x2a3f2331, 0x70486858, 0x7e416553, 0x6c5a724e, 0x62537f45, 0x486c5c74, 0x4665517f, 0x547e4662, 0x5a774b69, 0xe090d0b0, 0xee99ddbb, 0xfc82caa6, 0xf28bc7ad, 0xd8b4e49c, 0xd6bde997, 0xc4a6fe8a, 0xcaaff381, 0x90d8b8e8, 0x9ed1b5e3, 0x8ccaa2fe, 0x82c3aff5, 0xa8fc8cc4, 0xa6f581cf, 0xb4ee96d2, 0xbae79bd9, 0xdb3bbb7b, 0xd532b670, 0xc729a16d, 0xc920ac66, 0xe31f8f57, 0xed16825c, 0xff0d9541, 0xf104984a, 0xab73d323, 0xa57ade28, 0xb761c935, 0xb968c43e, 0x9357e70f, 0x9d5eea04, 0x8f45fd19, 0x814cf012, 0x3bab6bcb, 0x35a266c0, 0x27b971dd, 0x29b07cd6, 0x038f5fe7, 0x0d8652ec, 0x1f9d45f1, 0x119448fa, 0x4be30393, 0x45ea0e98, 0x57f11985, 0x59f8148e, 0x73c737bf, 0x7dce3ab4, 0x6fd52da9, 0x61dc20a2, 0xad766df6, 0xa37f60fd, 0xb16477e0, 0xbf6d7aeb, 0x955259da, 0x9b5b54d1, 0x894043cc, 0x87494ec7, 0xdd3e05ae, 0xd33708a5, 0xc12c1fb8, 0xcf2512b3, 0xe51a3182, 0xeb133c89, 0xf9082b94, 0xf701269f, 0x4de6bd46, 0x43efb04d, 0x51f4a750, 0x5ffdaa5b, 0x75c2896a, 0x7bcb8461, 0x69d0937c, 0x67d99e77, 0x3daed51e, 0x33a7d815, 0x21bccf08, 0x2fb5c203, 0x058ae132, 0x0b83ec39, 0x1998fb24, 0x1791f62f, 0x764dd68d, 0x7844db86, 0x6a5fcc9b, 0x6456c190, 0x4e69e2a1, 0x4060efaa, 0x527bf8b7, 0x5c72f5bc, 0x0605bed5, 0x080cb3de, 0x1a17a4c3, 0x141ea9c8, 0x3e218af9, 0x302887f2, 0x223390ef, 0x2c3a9de4, 0x96dd063d, 0x98d40b36, 0x8acf1c2b, 0x84c61120, 0xaef93211, 0xa0f03f1a, 0xb2eb2807, 0xbce2250c, 0xe6956e65, 0xe89c636e, 0xfa877473, 0xf48e7978, 0xdeb15a49, 0xd0b85742, 0xc2a3405f, 0xccaa4d54, 0x41ecdaf7, 0x4fe5d7fc, 0x5dfec0e1, 0x53f7cdea, 
0x79c8eedb, 0x77c1e3d0, 0x65daf4cd, 0x6bd3f9c6, 0x31a4b2af, 0x3fadbfa4, 0x2db6a8b9, 0x23bfa5b2, 0x09808683, 0x07898b88, 0x15929c95, 0x1b9b919e, 0xa17c0a47, 0xaf75074c, 0xbd6e1051, 0xb3671d5a, 0x99583e6b, 0x97513360, 0x854a247d, 0x8b432976, 0xd134621f, 0xdf3d6f14, 0xcd267809, 0xc32f7502, 0xe9105633, 0xe7195b38, 0xf5024c25, 0xfb0b412e, 0x9ad7618c, 0x94de6c87, 0x86c57b9a, 0x88cc7691, 0xa2f355a0, 0xacfa58ab, 0xbee14fb6, 0xb0e842bd, 0xea9f09d4, 0xe49604df, 0xf68d13c2, 0xf8841ec9, 0xd2bb3df8, 0xdcb230f3, 0xcea927ee, 0xc0a02ae5, 0x7a47b13c, 0x744ebc37, 0x6655ab2a, 0x685ca621, 0x42638510, 0x4c6a881b, 0x5e719f06, 0x5078920d, 0x0a0fd964, 0x0406d46f, 0x161dc372, 0x1814ce79, 0x322bed48, 0x3c22e043, 0x2e39f75e, 0x2030fa55, 0xec9ab701, 0xe293ba0a, 0xf088ad17, 0xfe81a01c, 0xd4be832d, 0xdab78e26, 0xc8ac993b, 0xc6a59430, 0x9cd2df59, 0x92dbd252, 0x80c0c54f, 0x8ec9c844, 0xa4f6eb75, 0xaaffe67e, 0xb8e4f163, 0xb6edfc68, 0x0c0a67b1, 0x02036aba, 0x10187da7, 0x1e1170ac, 0x342e539d, 0x3a275e96, 0x283c498b, 0x26354480, 0x7c420fe9, 0x724b02e2, 0x605015ff, 0x6e5918f4, 0x44663bc5, 0x4a6f36ce, 0x587421d3, 0x567d2cd8, 0x37a10c7a, 0x39a80171, 0x2bb3166c, 0x25ba1b67, 0x0f853856, 0x018c355d, 0x13972240, 0x1d9e2f4b, 0x47e96422, 0x49e06929, 0x5bfb7e34, 0x55f2733f, 0x7fcd500e, 0x71c45d05, 0x63df4a18, 0x6dd64713, 0xd731dcca, 0xd938d1c1, 0xcb23c6dc, 0xc52acbd7, 0xef15e8e6, 0xe11ce5ed, 0xf307f2f0, 0xfd0efffb, 0xa779b492, 0xa970b999, 0xbb6bae84, 0xb562a38f, 0x9f5d80be, 0x91548db5, 0x834f9aa8, 0x8d4697a3 ] - U2 = [ 0x00000000, 0x0b0e090d, 0x161c121a, 0x1d121b17, 0x2c382434, 0x27362d39, 0x3a24362e, 0x312a3f23, 0x58704868, 0x537e4165, 0x4e6c5a72, 0x4562537f, 0x74486c5c, 0x7f466551, 0x62547e46, 0x695a774b, 0xb0e090d0, 0xbbee99dd, 0xa6fc82ca, 0xadf28bc7, 0x9cd8b4e4, 0x97d6bde9, 0x8ac4a6fe, 0x81caaff3, 0xe890d8b8, 0xe39ed1b5, 0xfe8ccaa2, 0xf582c3af, 0xc4a8fc8c, 0xcfa6f581, 0xd2b4ee96, 0xd9bae79b, 0x7bdb3bbb, 0x70d532b6, 0x6dc729a1, 0x66c920ac, 0x57e31f8f, 0x5ced1682, 0x41ff0d95, 0x4af10498, 0x23ab73d3, 
0x28a57ade, 0x35b761c9, 0x3eb968c4, 0x0f9357e7, 0x049d5eea, 0x198f45fd, 0x12814cf0, 0xcb3bab6b, 0xc035a266, 0xdd27b971, 0xd629b07c, 0xe7038f5f, 0xec0d8652, 0xf11f9d45, 0xfa119448, 0x934be303, 0x9845ea0e, 0x8557f119, 0x8e59f814, 0xbf73c737, 0xb47dce3a, 0xa96fd52d, 0xa261dc20, 0xf6ad766d, 0xfda37f60, 0xe0b16477, 0xebbf6d7a, 0xda955259, 0xd19b5b54, 0xcc894043, 0xc787494e, 0xaedd3e05, 0xa5d33708, 0xb8c12c1f, 0xb3cf2512, 0x82e51a31, 0x89eb133c, 0x94f9082b, 0x9ff70126, 0x464de6bd, 0x4d43efb0, 0x5051f4a7, 0x5b5ffdaa, 0x6a75c289, 0x617bcb84, 0x7c69d093, 0x7767d99e, 0x1e3daed5, 0x1533a7d8, 0x0821bccf, 0x032fb5c2, 0x32058ae1, 0x390b83ec, 0x241998fb, 0x2f1791f6, 0x8d764dd6, 0x867844db, 0x9b6a5fcc, 0x906456c1, 0xa14e69e2, 0xaa4060ef, 0xb7527bf8, 0xbc5c72f5, 0xd50605be, 0xde080cb3, 0xc31a17a4, 0xc8141ea9, 0xf93e218a, 0xf2302887, 0xef223390, 0xe42c3a9d, 0x3d96dd06, 0x3698d40b, 0x2b8acf1c, 0x2084c611, 0x11aef932, 0x1aa0f03f, 0x07b2eb28, 0x0cbce225, 0x65e6956e, 0x6ee89c63, 0x73fa8774, 0x78f48e79, 0x49deb15a, 0x42d0b857, 0x5fc2a340, 0x54ccaa4d, 0xf741ecda, 0xfc4fe5d7, 0xe15dfec0, 0xea53f7cd, 0xdb79c8ee, 0xd077c1e3, 0xcd65daf4, 0xc66bd3f9, 0xaf31a4b2, 0xa43fadbf, 0xb92db6a8, 0xb223bfa5, 0x83098086, 0x8807898b, 0x9515929c, 0x9e1b9b91, 0x47a17c0a, 0x4caf7507, 0x51bd6e10, 0x5ab3671d, 0x6b99583e, 0x60975133, 0x7d854a24, 0x768b4329, 0x1fd13462, 0x14df3d6f, 0x09cd2678, 0x02c32f75, 0x33e91056, 0x38e7195b, 0x25f5024c, 0x2efb0b41, 0x8c9ad761, 0x8794de6c, 0x9a86c57b, 0x9188cc76, 0xa0a2f355, 0xabacfa58, 0xb6bee14f, 0xbdb0e842, 0xd4ea9f09, 0xdfe49604, 0xc2f68d13, 0xc9f8841e, 0xf8d2bb3d, 0xf3dcb230, 0xeecea927, 0xe5c0a02a, 0x3c7a47b1, 0x37744ebc, 0x2a6655ab, 0x21685ca6, 0x10426385, 0x1b4c6a88, 0x065e719f, 0x0d507892, 0x640a0fd9, 0x6f0406d4, 0x72161dc3, 0x791814ce, 0x48322bed, 0x433c22e0, 0x5e2e39f7, 0x552030fa, 0x01ec9ab7, 0x0ae293ba, 0x17f088ad, 0x1cfe81a0, 0x2dd4be83, 0x26dab78e, 0x3bc8ac99, 0x30c6a594, 0x599cd2df, 0x5292dbd2, 0x4f80c0c5, 0x448ec9c8, 0x75a4f6eb, 0x7eaaffe6, 0x63b8e4f1, 
0x68b6edfc, 0xb10c0a67, 0xba02036a, 0xa710187d, 0xac1e1170, 0x9d342e53, 0x963a275e, 0x8b283c49, 0x80263544, 0xe97c420f, 0xe2724b02, 0xff605015, 0xf46e5918, 0xc544663b, 0xce4a6f36, 0xd3587421, 0xd8567d2c, 0x7a37a10c, 0x7139a801, 0x6c2bb316, 0x6725ba1b, 0x560f8538, 0x5d018c35, 0x40139722, 0x4b1d9e2f, 0x2247e964, 0x2949e069, 0x345bfb7e, 0x3f55f273, 0x0e7fcd50, 0x0571c45d, 0x1863df4a, 0x136dd647, 0xcad731dc, 0xc1d938d1, 0xdccb23c6, 0xd7c52acb, 0xe6ef15e8, 0xede11ce5, 0xf0f307f2, 0xfbfd0eff, 0x92a779b4, 0x99a970b9, 0x84bb6bae, 0x8fb562a3, 0xbe9f5d80, 0xb591548d, 0xa8834f9a, 0xa38d4697 ] - U3 = [ 0x00000000, 0x0d0b0e09, 0x1a161c12, 0x171d121b, 0x342c3824, 0x3927362d, 0x2e3a2436, 0x23312a3f, 0x68587048, 0x65537e41, 0x724e6c5a, 0x7f456253, 0x5c74486c, 0x517f4665, 0x4662547e, 0x4b695a77, 0xd0b0e090, 0xddbbee99, 0xcaa6fc82, 0xc7adf28b, 0xe49cd8b4, 0xe997d6bd, 0xfe8ac4a6, 0xf381caaf, 0xb8e890d8, 0xb5e39ed1, 0xa2fe8cca, 0xaff582c3, 0x8cc4a8fc, 0x81cfa6f5, 0x96d2b4ee, 0x9bd9bae7, 0xbb7bdb3b, 0xb670d532, 0xa16dc729, 0xac66c920, 0x8f57e31f, 0x825ced16, 0x9541ff0d, 0x984af104, 0xd323ab73, 0xde28a57a, 0xc935b761, 0xc43eb968, 0xe70f9357, 0xea049d5e, 0xfd198f45, 0xf012814c, 0x6bcb3bab, 0x66c035a2, 0x71dd27b9, 0x7cd629b0, 0x5fe7038f, 0x52ec0d86, 0x45f11f9d, 0x48fa1194, 0x03934be3, 0x0e9845ea, 0x198557f1, 0x148e59f8, 0x37bf73c7, 0x3ab47dce, 0x2da96fd5, 0x20a261dc, 0x6df6ad76, 0x60fda37f, 0x77e0b164, 0x7aebbf6d, 0x59da9552, 0x54d19b5b, 0x43cc8940, 0x4ec78749, 0x05aedd3e, 0x08a5d337, 0x1fb8c12c, 0x12b3cf25, 0x3182e51a, 0x3c89eb13, 0x2b94f908, 0x269ff701, 0xbd464de6, 0xb04d43ef, 0xa75051f4, 0xaa5b5ffd, 0x896a75c2, 0x84617bcb, 0x937c69d0, 0x9e7767d9, 0xd51e3dae, 0xd81533a7, 0xcf0821bc, 0xc2032fb5, 0xe132058a, 0xec390b83, 0xfb241998, 0xf62f1791, 0xd68d764d, 0xdb867844, 0xcc9b6a5f, 0xc1906456, 0xe2a14e69, 0xefaa4060, 0xf8b7527b, 0xf5bc5c72, 0xbed50605, 0xb3de080c, 0xa4c31a17, 0xa9c8141e, 0x8af93e21, 0x87f23028, 0x90ef2233, 0x9de42c3a, 0x063d96dd, 0x0b3698d4, 0x1c2b8acf, 0x112084c6, 
0x3211aef9, 0x3f1aa0f0, 0x2807b2eb, 0x250cbce2, 0x6e65e695, 0x636ee89c, 0x7473fa87, 0x7978f48e, 0x5a49deb1, 0x5742d0b8, 0x405fc2a3, 0x4d54ccaa, 0xdaf741ec, 0xd7fc4fe5, 0xc0e15dfe, 0xcdea53f7, 0xeedb79c8, 0xe3d077c1, 0xf4cd65da, 0xf9c66bd3, 0xb2af31a4, 0xbfa43fad, 0xa8b92db6, 0xa5b223bf, 0x86830980, 0x8b880789, 0x9c951592, 0x919e1b9b, 0x0a47a17c, 0x074caf75, 0x1051bd6e, 0x1d5ab367, 0x3e6b9958, 0x33609751, 0x247d854a, 0x29768b43, 0x621fd134, 0x6f14df3d, 0x7809cd26, 0x7502c32f, 0x5633e910, 0x5b38e719, 0x4c25f502, 0x412efb0b, 0x618c9ad7, 0x6c8794de, 0x7b9a86c5, 0x769188cc, 0x55a0a2f3, 0x58abacfa, 0x4fb6bee1, 0x42bdb0e8, 0x09d4ea9f, 0x04dfe496, 0x13c2f68d, 0x1ec9f884, 0x3df8d2bb, 0x30f3dcb2, 0x27eecea9, 0x2ae5c0a0, 0xb13c7a47, 0xbc37744e, 0xab2a6655, 0xa621685c, 0x85104263, 0x881b4c6a, 0x9f065e71, 0x920d5078, 0xd9640a0f, 0xd46f0406, 0xc372161d, 0xce791814, 0xed48322b, 0xe0433c22, 0xf75e2e39, 0xfa552030, 0xb701ec9a, 0xba0ae293, 0xad17f088, 0xa01cfe81, 0x832dd4be, 0x8e26dab7, 0x993bc8ac, 0x9430c6a5, 0xdf599cd2, 0xd25292db, 0xc54f80c0, 0xc8448ec9, 0xeb75a4f6, 0xe67eaaff, 0xf163b8e4, 0xfc68b6ed, 0x67b10c0a, 0x6aba0203, 0x7da71018, 0x70ac1e11, 0x539d342e, 0x5e963a27, 0x498b283c, 0x44802635, 0x0fe97c42, 0x02e2724b, 0x15ff6050, 0x18f46e59, 0x3bc54466, 0x36ce4a6f, 0x21d35874, 0x2cd8567d, 0x0c7a37a1, 0x017139a8, 0x166c2bb3, 0x1b6725ba, 0x38560f85, 0x355d018c, 0x22401397, 0x2f4b1d9e, 0x642247e9, 0x692949e0, 0x7e345bfb, 0x733f55f2, 0x500e7fcd, 0x5d0571c4, 0x4a1863df, 0x47136dd6, 0xdccad731, 0xd1c1d938, 0xc6dccb23, 0xcbd7c52a, 0xe8e6ef15, 0xe5ede11c, 0xf2f0f307, 0xfffbfd0e, 0xb492a779, 0xb999a970, 0xae84bb6b, 0xa38fb562, 0x80be9f5d, 0x8db59154, 0x9aa8834f, 0x97a38d46 ] - U4 = [ 0x00000000, 0x090d0b0e, 0x121a161c, 0x1b171d12, 0x24342c38, 0x2d392736, 0x362e3a24, 0x3f23312a, 0x48685870, 0x4165537e, 0x5a724e6c, 0x537f4562, 0x6c5c7448, 0x65517f46, 0x7e466254, 0x774b695a, 0x90d0b0e0, 0x99ddbbee, 0x82caa6fc, 0x8bc7adf2, 0xb4e49cd8, 0xbde997d6, 0xa6fe8ac4, 0xaff381ca, 0xd8b8e890, 
0xd1b5e39e, 0xcaa2fe8c, 0xc3aff582, 0xfc8cc4a8, 0xf581cfa6, 0xee96d2b4, 0xe79bd9ba, 0x3bbb7bdb, 0x32b670d5, 0x29a16dc7, 0x20ac66c9, 0x1f8f57e3, 0x16825ced, 0x0d9541ff, 0x04984af1, 0x73d323ab, 0x7ade28a5, 0x61c935b7, 0x68c43eb9, 0x57e70f93, 0x5eea049d, 0x45fd198f, 0x4cf01281, 0xab6bcb3b, 0xa266c035, 0xb971dd27, 0xb07cd629, 0x8f5fe703, 0x8652ec0d, 0x9d45f11f, 0x9448fa11, 0xe303934b, 0xea0e9845, 0xf1198557, 0xf8148e59, 0xc737bf73, 0xce3ab47d, 0xd52da96f, 0xdc20a261, 0x766df6ad, 0x7f60fda3, 0x6477e0b1, 0x6d7aebbf, 0x5259da95, 0x5b54d19b, 0x4043cc89, 0x494ec787, 0x3e05aedd, 0x3708a5d3, 0x2c1fb8c1, 0x2512b3cf, 0x1a3182e5, 0x133c89eb, 0x082b94f9, 0x01269ff7, 0xe6bd464d, 0xefb04d43, 0xf4a75051, 0xfdaa5b5f, 0xc2896a75, 0xcb84617b, 0xd0937c69, 0xd99e7767, 0xaed51e3d, 0xa7d81533, 0xbccf0821, 0xb5c2032f, 0x8ae13205, 0x83ec390b, 0x98fb2419, 0x91f62f17, 0x4dd68d76, 0x44db8678, 0x5fcc9b6a, 0x56c19064, 0x69e2a14e, 0x60efaa40, 0x7bf8b752, 0x72f5bc5c, 0x05bed506, 0x0cb3de08, 0x17a4c31a, 0x1ea9c814, 0x218af93e, 0x2887f230, 0x3390ef22, 0x3a9de42c, 0xdd063d96, 0xd40b3698, 0xcf1c2b8a, 0xc6112084, 0xf93211ae, 0xf03f1aa0, 0xeb2807b2, 0xe2250cbc, 0x956e65e6, 0x9c636ee8, 0x877473fa, 0x8e7978f4, 0xb15a49de, 0xb85742d0, 0xa3405fc2, 0xaa4d54cc, 0xecdaf741, 0xe5d7fc4f, 0xfec0e15d, 0xf7cdea53, 0xc8eedb79, 0xc1e3d077, 0xdaf4cd65, 0xd3f9c66b, 0xa4b2af31, 0xadbfa43f, 0xb6a8b92d, 0xbfa5b223, 0x80868309, 0x898b8807, 0x929c9515, 0x9b919e1b, 0x7c0a47a1, 0x75074caf, 0x6e1051bd, 0x671d5ab3, 0x583e6b99, 0x51336097, 0x4a247d85, 0x4329768b, 0x34621fd1, 0x3d6f14df, 0x267809cd, 0x2f7502c3, 0x105633e9, 0x195b38e7, 0x024c25f5, 0x0b412efb, 0xd7618c9a, 0xde6c8794, 0xc57b9a86, 0xcc769188, 0xf355a0a2, 0xfa58abac, 0xe14fb6be, 0xe842bdb0, 0x9f09d4ea, 0x9604dfe4, 0x8d13c2f6, 0x841ec9f8, 0xbb3df8d2, 0xb230f3dc, 0xa927eece, 0xa02ae5c0, 0x47b13c7a, 0x4ebc3774, 0x55ab2a66, 0x5ca62168, 0x63851042, 0x6a881b4c, 0x719f065e, 0x78920d50, 0x0fd9640a, 0x06d46f04, 0x1dc37216, 0x14ce7918, 0x2bed4832, 0x22e0433c, 0x39f75e2e, 
0x30fa5520, 0x9ab701ec, 0x93ba0ae2, 0x88ad17f0, 0x81a01cfe, 0xbe832dd4, 0xb78e26da, 0xac993bc8, 0xa59430c6, 0xd2df599c, 0xdbd25292, 0xc0c54f80, 0xc9c8448e, 0xf6eb75a4, 0xffe67eaa, 0xe4f163b8, 0xedfc68b6, 0x0a67b10c, 0x036aba02, 0x187da710, 0x1170ac1e, 0x2e539d34, 0x275e963a, 0x3c498b28, 0x35448026, 0x420fe97c, 0x4b02e272, 0x5015ff60, 0x5918f46e, 0x663bc544, 0x6f36ce4a, 0x7421d358, 0x7d2cd856, 0xa10c7a37, 0xa8017139, 0xb3166c2b, 0xba1b6725, 0x8538560f, 0x8c355d01, 0x97224013, 0x9e2f4b1d, 0xe9642247, 0xe0692949, 0xfb7e345b, 0xf2733f55, 0xcd500e7f, 0xc45d0571, 0xdf4a1863, 0xd647136d, 0x31dccad7, 0x38d1c1d9, 0x23c6dccb, 0x2acbd7c5, 0x15e8e6ef, 0x1ce5ede1, 0x07f2f0f3, 0x0efffbfd, 0x79b492a7, 0x70b999a9, 0x6bae84bb, 0x62a38fb5, 0x5d80be9f, 0x548db591, 0x4f9aa883, 0x4697a38d ] - - def __init__(self, key): - - if len(key) not in (16, 24, 32): - raise ValueError('Invalid key size') - - rounds = self.number_of_rounds[len(key)] - - # Encryption round keys - self._Ke = [[0] * 4 for i in xrange(rounds + 1)] - - # Decryption round keys - self._Kd = [[0] * 4 for i in xrange(rounds + 1)] - - round_key_count = (rounds + 1) * 4 - KC = len(key) // 4 - - # Convert the key into ints - tk = [ struct.unpack('>i', key[i:i + 4])[0] for i in xrange(0, len(key), 4) ] - - # Copy values into round key arrays - for i in xrange(0, KC): - self._Ke[i // 4][i % 4] = tk[i] - self._Kd[rounds - (i // 4)][i % 4] = tk[i] - - # Key expansion (fips-197 section 5.2) - rconpointer = 0 - t = KC - while t < round_key_count: - - tt = tk[KC - 1] - tk[0] ^= ((self.S[(tt >> 16) & 0xFF] << 24) ^ - (self.S[(tt >> 8) & 0xFF] << 16) ^ - (self.S[ tt & 0xFF] << 8) ^ - self.S[(tt >> 24) & 0xFF] ^ - (self.rcon[rconpointer] << 24)) - rconpointer += 1 - - if KC != 8: - for i in xrange(1, KC): - tk[i] ^= tk[i - 1] - - # Key expansion for 256-bit keys is "slightly different" (fips-197) - else: - for i in xrange(1, KC // 2): - tk[i] ^= tk[i - 1] - tt = tk[KC // 2 - 1] - - tk[KC // 2] ^= (self.S[ tt & 0xFF] ^ - (self.S[(tt >> 
8) & 0xFF] << 8) ^ - (self.S[(tt >> 16) & 0xFF] << 16) ^ - (self.S[(tt >> 24) & 0xFF] << 24)) - - for i in xrange(KC // 2 + 1, KC): - tk[i] ^= tk[i - 1] - - # Copy values into round key arrays - j = 0 - while j < KC and t < round_key_count: - self._Ke[t // 4][t % 4] = tk[j] - self._Kd[rounds - (t // 4)][t % 4] = tk[j] - j += 1 - t += 1 - - # Inverse-Cipher-ify the decryption round key (fips-197 section 5.3) - for r in xrange(1, rounds): - for j in xrange(0, 4): - tt = self._Kd[r][j] - self._Kd[r][j] = (self.U1[(tt >> 24) & 0xFF] ^ - self.U2[(tt >> 16) & 0xFF] ^ - self.U3[(tt >> 8) & 0xFF] ^ - self.U4[ tt & 0xFF]) - - def encrypt(self, plaintext): - 'Encrypt a block of plain text using the AES block cipher.' - - if len(plaintext) != 16: - raise ValueError('wrong block length') - - rounds = len(self._Ke) - 1 - (s1, s2, s3) = [1, 2, 3] - a = [0, 0, 0, 0] - - # Convert plaintext to (ints ^ key) - t = [(_compact_word(plaintext[4 * i:4 * i + 4]) ^ self._Ke[0][i]) for i in xrange(0, 4)] - - # Apply round transforms - for r in xrange(1, rounds): - for i in xrange(0, 4): - a[i] = (self.T1[(t[ i ] >> 24) & 0xFF] ^ - self.T2[(t[(i + s1) % 4] >> 16) & 0xFF] ^ - self.T3[(t[(i + s2) % 4] >> 8) & 0xFF] ^ - self.T4[ t[(i + s3) % 4] & 0xFF] ^ - self._Ke[r][i]) - t = copy.copy(a) - - # The last round is special - result = [ ] - for i in xrange(0, 4): - tt = self._Ke[rounds][i] - result.append((self.S[(t[ i ] >> 24) & 0xFF] ^ (tt >> 24)) & 0xFF) - result.append((self.S[(t[(i + s1) % 4] >> 16) & 0xFF] ^ (tt >> 16)) & 0xFF) - result.append((self.S[(t[(i + s2) % 4] >> 8) & 0xFF] ^ (tt >> 8)) & 0xFF) - result.append((self.S[ t[(i + s3) % 4] & 0xFF] ^ tt ) & 0xFF) - - return result - - def decrypt(self, ciphertext): - 'Decrypt a block of cipher text using the AES block cipher.' 
- - if len(ciphertext) != 16: - raise ValueError('wrong block length') - - rounds = len(self._Kd) - 1 - (s1, s2, s3) = [3, 2, 1] - a = [0, 0, 0, 0] - - # Convert ciphertext to (ints ^ key) - t = [(_compact_word(ciphertext[4 * i:4 * i + 4]) ^ self._Kd[0][i]) for i in xrange(0, 4)] - - # Apply round transforms - for r in xrange(1, rounds): - for i in xrange(0, 4): - a[i] = (self.T5[(t[ i ] >> 24) & 0xFF] ^ - self.T6[(t[(i + s1) % 4] >> 16) & 0xFF] ^ - self.T7[(t[(i + s2) % 4] >> 8) & 0xFF] ^ - self.T8[ t[(i + s3) % 4] & 0xFF] ^ - self._Kd[r][i]) - t = copy.copy(a) - - # The last round is special - result = [ ] - for i in xrange(0, 4): - tt = self._Kd[rounds][i] - result.append((self.Si[(t[ i ] >> 24) & 0xFF] ^ (tt >> 24)) & 0xFF) - result.append((self.Si[(t[(i + s1) % 4] >> 16) & 0xFF] ^ (tt >> 16)) & 0xFF) - result.append((self.Si[(t[(i + s2) % 4] >> 8) & 0xFF] ^ (tt >> 8)) & 0xFF) - result.append((self.Si[ t[(i + s3) % 4] & 0xFF] ^ tt ) & 0xFF) - - return result - - -class Counter(object): - '''A counter object for the Counter (CTR) mode of operation. 
- - To create a custom counter, you can usually just override the - increment method.''' - - def __init__(self, initial_value = 1): - - # Convert the value into an array of bytes long - self._counter = [ ((initial_value >> i) % 256) for i in xrange(128 - 8, -1, -8) ] - - value = property(lambda s: s._counter) - - def increment(self): - '''Increment the counter (overflow rolls back to 0).''' - - for i in xrange(len(self._counter) - 1, -1, -1): - self._counter[i] += 1 - - if self._counter[i] < 256: break - - # Carry the one - self._counter[i] = 0 - - # Overflow - else: - self._counter = [ 0 ] * len(self._counter) - - -class AESBlockModeOfOperation(object): - '''Super-class for AES modes of operation that require blocks.''' - def __init__(self, key): - self._aes = AES(key) - - def decrypt(self, ciphertext): - raise Exception('not implemented') - - def encrypt(self, plaintext): - raise Exception('not implemented') - - -class AESStreamModeOfOperation(AESBlockModeOfOperation): - '''Super-class for AES modes of operation that are stream-ciphers.''' - -class AESSegmentModeOfOperation(AESStreamModeOfOperation): - '''Super-class for AES modes of operation that segment data.''' - - segment_bytes = 16 - - - -class AESModeOfOperationECB(AESBlockModeOfOperation): - '''AES Electronic Codebook Mode of Operation. - - o Block-cipher, so data must be padded to 16 byte boundaries - - Security Notes: - o This mode is not recommended - o Any two identical blocks produce identical encrypted values, - exposing data patterns. 
(See the image of Tux on wikipedia) - - Also see: - o https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Electronic_codebook_.28ECB.29 - o See NIST SP800-38A (http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf); section 6.1''' - - - name = "Electronic Codebook (ECB)" - - def encrypt(self, plaintext): - if len(plaintext) != 16: - raise ValueError('plaintext block must be 16 bytes') - - plaintext = _string_to_bytes(plaintext) - return _bytes_to_string(self._aes.encrypt(plaintext)) - - def decrypt(self, ciphertext): - if len(ciphertext) != 16: - raise ValueError('ciphertext block must be 16 bytes') - - ciphertext = _string_to_bytes(ciphertext) - return _bytes_to_string(self._aes.decrypt(ciphertext)) - - - -class AESModeOfOperationCBC(AESBlockModeOfOperation): - '''AES Cipher-Block Chaining Mode of Operation. - - o The Initialization Vector (IV) - o Block-cipher, so data must be padded to 16 byte boundaries - o An incorrect initialization vector will only cause the first - block to be corrupt; all other blocks will be intact - o A corrupt bit in the cipher text will cause a block to be - corrupted, and the next block to be inverted, but all other - blocks will be intact. - - Security Notes: - o This method (and CTR) ARE recommended. 
- - Also see: - o https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher-block_chaining_.28CBC.29 - o See NIST SP800-38A (http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf); section 6.2''' - - - name = "Cipher-Block Chaining (CBC)" - - def __init__(self, key, iv = None): - if iv is None: - self._last_cipherblock = [ 0 ] * 16 - elif len(iv) != 16: - raise ValueError('initialization vector must be 16 bytes') - else: - self._last_cipherblock = _string_to_bytes(iv) - - AESBlockModeOfOperation.__init__(self, key) - - def encrypt(self, plaintext): - if len(plaintext) != 16: - raise ValueError('plaintext block must be 16 bytes') - - plaintext = _string_to_bytes(plaintext) - precipherblock = [ (p ^ l) for (p, l) in zip(plaintext, self._last_cipherblock) ] - self._last_cipherblock = self._aes.encrypt(precipherblock) - - return _bytes_to_string(self._last_cipherblock) - - def decrypt(self, ciphertext): - if len(ciphertext) != 16: - raise ValueError('ciphertext block must be 16 bytes') - - cipherblock = _string_to_bytes(ciphertext) - plaintext = [ (p ^ l) for (p, l) in zip(self._aes.decrypt(cipherblock), self._last_cipherblock) ] - self._last_cipherblock = cipherblock - - return _bytes_to_string(plaintext) - - - -class AESModeOfOperationCFB(AESSegmentModeOfOperation): - '''AES Cipher Feedback Mode of Operation. 
- - o A stream-cipher, so input does not need to be padded to blocks, - but does need to be padded to segment_size - - Also see: - o https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_feedback_.28CFB.29 - o See NIST SP800-38A (http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf); section 6.3''' - - - name = "Cipher Feedback (CFB)" - - def __init__(self, key, iv, segment_size = 1): - if segment_size == 0: segment_size = 1 - - if iv is None: - self._shift_register = [ 0 ] * 16 - elif len(iv) != 16: - raise ValueError('initialization vector must be 16 bytes') - else: - self._shift_register = _string_to_bytes(iv) - - self._segment_bytes = segment_size - - AESBlockModeOfOperation.__init__(self, key) - - segment_bytes = property(lambda s: s._segment_bytes) - - def encrypt(self, plaintext): - if len(plaintext) % self._segment_bytes != 0: - raise ValueError('plaintext block must be a multiple of segment_size') - - plaintext = _string_to_bytes(plaintext) - - # Break block into segments - encrypted = [ ] - for i in xrange(0, len(plaintext), self._segment_bytes): - plaintext_segment = plaintext[i: i + self._segment_bytes] - xor_segment = self._aes.encrypt(self._shift_register)[:len(plaintext_segment)] - cipher_segment = [ (p ^ x) for (p, x) in zip(plaintext_segment, xor_segment) ] - - # Shift the top bits out and the ciphertext in - self._shift_register = _concat_list(self._shift_register[len(cipher_segment):], cipher_segment) - - encrypted.extend(cipher_segment) - - return _bytes_to_string(encrypted) - - def decrypt(self, ciphertext): - if len(ciphertext) % self._segment_bytes != 0: - raise ValueError('ciphertext block must be a multiple of segment_size') - - ciphertext = _string_to_bytes(ciphertext) - - # Break block into segments - decrypted = [ ] - for i in xrange(0, len(ciphertext), self._segment_bytes): - cipher_segment = ciphertext[i: i + self._segment_bytes] - xor_segment = self._aes.encrypt(self._shift_register)[:len(cipher_segment)] - 
plaintext_segment = [ (p ^ x) for (p, x) in zip(cipher_segment, xor_segment) ] - - # Shift the top bits out and the ciphertext in - self._shift_register = _concat_list(self._shift_register[len(cipher_segment):], cipher_segment) - - decrypted.extend(plaintext_segment) - - return _bytes_to_string(decrypted) - - - -class AESModeOfOperationOFB(AESStreamModeOfOperation): - '''AES Output Feedback Mode of Operation. - - o A stream-cipher, so input does not need to be padded to blocks, - allowing arbitrary length data. - o A bit twiddled in the cipher text, twiddles the same bit in the - same bit in the plain text, which can be useful for error - correction techniques. - - Also see: - o https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Output_feedback_.28OFB.29 - o See NIST SP800-38A (http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf); section 6.4''' - - - name = "Output Feedback (OFB)" - - def __init__(self, key, iv = None): - if iv is None: - self._last_precipherblock = [ 0 ] * 16 - elif len(iv) != 16: - raise ValueError('initialization vector must be 16 bytes') - else: - self._last_precipherblock = _string_to_bytes(iv) - - self._remaining_block = [ ] - - AESBlockModeOfOperation.__init__(self, key) - - def encrypt(self, plaintext): - encrypted = [ ] - for p in _string_to_bytes(plaintext): - if len(self._remaining_block) == 0: - self._remaining_block = self._aes.encrypt(self._last_precipherblock) - self._last_precipherblock = [ ] - precipherbyte = self._remaining_block.pop(0) - self._last_precipherblock.append(precipherbyte) - cipherbyte = p ^ precipherbyte - encrypted.append(cipherbyte) - - return _bytes_to_string(encrypted) - - def decrypt(self, ciphertext): - # AES-OFB is symetric - return self.encrypt(ciphertext) - - - -class AESModeOfOperationCTR(AESStreamModeOfOperation): - '''AES Counter Mode of Operation. - - o A stream-cipher, so input does not need to be padded to blocks, - allowing arbitrary length data. 
- o The counter must be the same size as the key size (ie. len(key)) - o Each block independant of the other, so a corrupt byte will not - damage future blocks. - o Each block has a uniue counter value associated with it, which - contributes to the encrypted value, so no data patterns are - leaked. - o Also known as: Counter Mode (CM), Integer Counter Mode (ICM) and - Segmented Integer Counter (SIC - - Security Notes: - o This method (and CBC) ARE recommended. - o Each message block is associated with a counter value which must be - unique for ALL messages with the same key. Otherwise security may be - compromised. - - Also see: - - o https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Counter_.28CTR.29 - o See NIST SP800-38A (http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf); section 6.5 - and Appendix B for managing the initial counter''' - - - name = "Counter (CTR)" - - def __init__(self, key, counter = None): - AESBlockModeOfOperation.__init__(self, key) - - if counter is None: - counter = Counter() - - self._counter = counter - self._remaining_counter = [ ] - - def encrypt(self, plaintext): - while len(self._remaining_counter) < len(plaintext): - self._remaining_counter += self._aes.encrypt(self._counter.value) - self._counter.increment() - - plaintext = _string_to_bytes(plaintext) - - encrypted = [ (p ^ c) for (p, c) in zip(plaintext, self._remaining_counter) ] - self._remaining_counter = self._remaining_counter[len(encrypted):] - - return _bytes_to_string(encrypted) - - def decrypt(self, crypttext): - # AES-CTR is symetric - return self.encrypt(crypttext) - - -# Simple lookup table for each mode -AESModesOfOperation = dict( - ctr = AESModeOfOperationCTR, - cbc = AESModeOfOperationCBC, - cfb = AESModeOfOperationCFB, - ecb = AESModeOfOperationECB, - ofb = AESModeOfOperationOFB, -) diff --git a/src/lib/pyaes/blockfeeder.py b/src/lib/pyaes/blockfeeder.py deleted file mode 100644 index b9a904d2..00000000 --- a/src/lib/pyaes/blockfeeder.py 
+++ /dev/null @@ -1,227 +0,0 @@ -# The MIT License (MIT) -# -# Copyright (c) 2014 Richard Moore -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. 
- - -from .aes import AESBlockModeOfOperation, AESSegmentModeOfOperation, AESStreamModeOfOperation -from .util import append_PKCS7_padding, strip_PKCS7_padding, to_bufferable - - -# First we inject three functions to each of the modes of operations -# -# _can_consume(size) -# - Given a size, determine how many bytes could be consumed in -# a single call to either the decrypt or encrypt method -# -# _final_encrypt(data, padding = PADDING_DEFAULT) -# - call and return encrypt on this (last) chunk of data, -# padding as necessary; this will always be at least 16 -# bytes unless the total incoming input was less than 16 -# bytes -# -# _final_decrypt(data, padding = PADDING_DEFAULT) -# - same as _final_encrypt except for decrypt, for -# stripping off padding -# - -PADDING_NONE = 'none' -PADDING_DEFAULT = 'default' - -# @TODO: Ciphertext stealing and explicit PKCS#7 -# PADDING_CIPHERTEXT_STEALING -# PADDING_PKCS7 - -# ECB and CBC are block-only ciphers - -def _block_can_consume(self, size): - if size >= 16: return 16 - return 0 - -# After padding, we may have more than one block -def _block_final_encrypt(self, data, padding = PADDING_DEFAULT): - if padding == PADDING_DEFAULT: - data = append_PKCS7_padding(data) - - elif padding == PADDING_NONE: - if len(data) != 16: - raise Exception('invalid data length for final block') - else: - raise Exception('invalid padding option') - - if len(data) == 32: - return self.encrypt(data[:16]) + self.encrypt(data[16:]) - - return self.encrypt(data) - - -def _block_final_decrypt(self, data, padding = PADDING_DEFAULT): - if padding == PADDING_DEFAULT: - return strip_PKCS7_padding(self.decrypt(data)) - - if padding == PADDING_NONE: - if len(data) != 16: - raise Exception('invalid data length for final block') - return self.decrypt(data) - - raise Exception('invalid padding option') - -AESBlockModeOfOperation._can_consume = _block_can_consume -AESBlockModeOfOperation._final_encrypt = _block_final_encrypt 
-AESBlockModeOfOperation._final_decrypt = _block_final_decrypt - - - -# CFB is a segment cipher - -def _segment_can_consume(self, size): - return self.segment_bytes * int(size // self.segment_bytes) - -# CFB can handle a non-segment-sized block at the end using the remaining cipherblock -def _segment_final_encrypt(self, data, padding = PADDING_DEFAULT): - if padding != PADDING_DEFAULT: - raise Exception('invalid padding option') - - faux_padding = (chr(0) * (self.segment_bytes - (len(data) % self.segment_bytes))) - padded = data + to_bufferable(faux_padding) - return self.encrypt(padded)[:len(data)] - -# CFB can handle a non-segment-sized block at the end using the remaining cipherblock -def _segment_final_decrypt(self, data, padding = PADDING_DEFAULT): - if padding != PADDING_DEFAULT: - raise Exception('invalid padding option') - - faux_padding = (chr(0) * (self.segment_bytes - (len(data) % self.segment_bytes))) - padded = data + to_bufferable(faux_padding) - return self.decrypt(padded)[:len(data)] - -AESSegmentModeOfOperation._can_consume = _segment_can_consume -AESSegmentModeOfOperation._final_encrypt = _segment_final_encrypt -AESSegmentModeOfOperation._final_decrypt = _segment_final_decrypt - - - -# OFB and CTR are stream ciphers - -def _stream_can_consume(self, size): - return size - -def _stream_final_encrypt(self, data, padding = PADDING_DEFAULT): - if padding not in [PADDING_NONE, PADDING_DEFAULT]: - raise Exception('invalid padding option') - - return self.encrypt(data) - -def _stream_final_decrypt(self, data, padding = PADDING_DEFAULT): - if padding not in [PADDING_NONE, PADDING_DEFAULT]: - raise Exception('invalid padding option') - - return self.decrypt(data) - -AESStreamModeOfOperation._can_consume = _stream_can_consume -AESStreamModeOfOperation._final_encrypt = _stream_final_encrypt -AESStreamModeOfOperation._final_decrypt = _stream_final_decrypt - - - -class BlockFeeder(object): - '''The super-class for objects to handle chunking a stream of bytes - 
into the appropriate block size for the underlying mode of operation - and applying (or stripping) padding, as necessary.''' - - def __init__(self, mode, feed, final, padding = PADDING_DEFAULT): - self._mode = mode - self._feed = feed - self._final = final - self._buffer = to_bufferable("") - self._padding = padding - - def feed(self, data = None): - '''Provide bytes to encrypt (or decrypt), returning any bytes - possible from this or any previous calls to feed. - - Call with None or an empty string to flush the mode of - operation and return any final bytes; no further calls to - feed may be made.''' - - if self._buffer is None: - raise ValueError('already finished feeder') - - # Finalize; process the spare bytes we were keeping - if data is None: - result = self._final(self._buffer, self._padding) - self._buffer = None - return result - - self._buffer += to_bufferable(data) - - # We keep 16 bytes around so we can determine padding - result = to_bufferable('') - while len(self._buffer) > 16: - can_consume = self._mode._can_consume(len(self._buffer) - 16) - if can_consume == 0: break - result += self._feed(self._buffer[:can_consume]) - self._buffer = self._buffer[can_consume:] - - return result - - -class Encrypter(BlockFeeder): - 'Accepts bytes of plaintext and returns encrypted ciphertext.' - - def __init__(self, mode, padding = PADDING_DEFAULT): - BlockFeeder.__init__(self, mode, mode.encrypt, mode._final_encrypt, padding) - - -class Decrypter(BlockFeeder): - 'Accepts bytes of ciphertext and returns decrypted plaintext.' - - def __init__(self, mode, padding = PADDING_DEFAULT): - BlockFeeder.__init__(self, mode, mode.decrypt, mode._final_decrypt, padding) - - -# 8kb blocks -BLOCK_SIZE = (1 << 13) - -def _feed_stream(feeder, in_stream, out_stream, block_size = BLOCK_SIZE): - 'Uses feeder to read and convert from in_stream and write to out_stream.' 
- - while True: - chunk = in_stream.read(block_size) - if not chunk: - break - converted = feeder.feed(chunk) - out_stream.write(converted) - converted = feeder.feed() - out_stream.write(converted) - - -def encrypt_stream(mode, in_stream, out_stream, block_size = BLOCK_SIZE, padding = PADDING_DEFAULT): - 'Encrypts a stream of bytes from in_stream to out_stream using mode.' - - encrypter = Encrypter(mode, padding = padding) - _feed_stream(encrypter, in_stream, out_stream, block_size) - - -def decrypt_stream(mode, in_stream, out_stream, block_size = BLOCK_SIZE, padding = PADDING_DEFAULT): - 'Decrypts a stream of bytes from in_stream to out_stream using mode.' - - decrypter = Decrypter(mode, padding = padding) - _feed_stream(decrypter, in_stream, out_stream, block_size) diff --git a/src/lib/pyaes/util.py b/src/lib/pyaes/util.py deleted file mode 100644 index 081a3759..00000000 --- a/src/lib/pyaes/util.py +++ /dev/null @@ -1,60 +0,0 @@ -# The MIT License (MIT) -# -# Copyright (c) 2014 Richard Moore -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. - -# Why to_bufferable? -# Python 3 is very different from Python 2.x when it comes to strings of text -# and strings of bytes; in Python 3, strings of bytes do not exist, instead to -# represent arbitrary binary data, we must use the "bytes" object. This method -# ensures the object behaves as we need it to. - -def to_bufferable(binary): - return binary - -def _get_byte(c): - return ord(c) - -try: - xrange -except: - - def to_bufferable(binary): - if isinstance(binary, bytes): - return binary - return bytes(ord(b) for b in binary) - - def _get_byte(c): - return c - -def append_PKCS7_padding(data): - pad = 16 - (len(data) % 16) - return data + to_bufferable(chr(pad) * pad) - -def strip_PKCS7_padding(data): - if len(data) % 16 != 0: - raise ValueError("invalid length") - - pad = _get_byte(data[-1]) - - if pad > 16: - raise ValueError("invalid padding byte") - - return data[:-pad] diff --git a/src/lib/pyaes/LICENSE.txt b/src/lib/pybitcointools/LICENSE similarity index 80% rename from src/lib/pyaes/LICENSE.txt rename to src/lib/pybitcointools/LICENSE index 0417a6c2..c47d4ad0 100644 --- a/src/lib/pyaes/LICENSE.txt +++ b/src/lib/pybitcointools/LICENSE @@ -1,6 +1,12 @@ +This code is public domain. Everyone has the right to do whatever they want +with it for any purpose. 
+ +In case your jurisdiction does not consider the above disclaimer valid or +enforceable, here's an MIT license for you: + The MIT License (MIT) -Copyright (c) 2014 Richard Moore +Copyright (c) 2013 Vitalik Buterin Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal @@ -19,4 +25,3 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - diff --git a/src/lib/pybitcointools/MANIFEST.in b/src/lib/pybitcointools/MANIFEST.in new file mode 100644 index 00000000..1aba38f6 --- /dev/null +++ b/src/lib/pybitcointools/MANIFEST.in @@ -0,0 +1 @@ +include LICENSE diff --git a/src/lib/pybitcointools/README.txt b/src/lib/pybitcointools/README.txt new file mode 100644 index 00000000..2f2876e7 --- /dev/null +++ b/src/lib/pybitcointools/README.txt @@ -0,0 +1,142 @@ +# Pybitcointools, Python library for Bitcoin signatures and transactions + +### Advantages: + +* Functions have a simple interface, inputting and outputting in standard formats +* No classes +* Many functions can be taken out and used individually +* Supports binary, hex and base58 +* Transaction deserialization format almost compatible with BitcoinJS +* Electrum and BIP0032 support +* Make and publish a transaction all in a single command line instruction +* Includes non-bitcoin-specific conversion and JSON utilities + +### Disadvantages: + +* Not a full node, has no idea what blocks are +* Relies on centralized service (blockchain.info) for blockchain operations, although operations do have backups (eligius, blockr.io) + +### Example usage (best way to learn :) ): + + > from bitcoin import * + > priv = sha256('some big long brainwallet password') + > priv + '57c617d9b4e1f7af6ec97ca2ff57e94a28279a7eedd4d12a99fa11170e94f5a4' + > pub = 
privtopub(priv) + > pub + '0420f34c2786b4bae593e22596631b025f3ff46e200fc1d4b52ef49bbdc2ed00b26c584b7e32523fb01be2294a1f8a5eb0cf71a203cc034ced46ea92a8df16c6e9' + > addr = pubtoaddr(pub) + > addr + '1CQLd3bhw4EzaURHbKCwM5YZbUQfA4ReY6' + > h = history(addr) + > h + [{'output': u'97f7c7d8ac85e40c255f8a763b6cd9a68f3a94d2e93e8bfa08f977b92e55465e:0', 'value': 50000, 'address': u'1CQLd3bhw4EzaURHbKCwM5YZbUQfA4ReY6'}, {'output': u'4cc806bb04f730c445c60b3e0f4f44b54769a1c196ca37d8d4002135e4abd171:1', 'value': 50000, 'address': u'1CQLd3bhw4EzaURHbKCwM5YZbUQfA4ReY6'}] + > outs = [{'value': 90000, 'address': '16iw1MQ1sy1DtRPYw3ao1bCamoyBJtRB4t'}] + > tx = mktx(h,outs) + > tx + '01000000025e46552eb977f908fa8b3ee9d2943a8fa6d96c3b768a5f250ce485acd8c7f7970000000000ffffffff71d1abe4352100d4d837ca96c1a16947b5444f0f3e0bc645c430f704bb06c84c0100000000ffffffff01905f0100000000001976a9143ec6c3ed8dfc3ceabcc1cbdb0c5aef4e2d02873c88ac00000000' + > tx2 = sign(tx,0,priv) + > tx2 + '01000000025e46552eb977f908fa8b3ee9d2943a8fa6d96c3b768a5f250ce485acd8c7f797000000008b483045022100dd29d89a28451febb990fb1dafa21245b105140083ced315ebcdea187572b3990220713f2e554f384d29d7abfedf39f0eb92afba0ef46f374e49d43a728a0ff6046e01410420f34c2786b4bae593e22596631b025f3ff46e200fc1d4b52ef49bbdc2ed00b26c584b7e32523fb01be2294a1f8a5eb0cf71a203cc034ced46ea92a8df16c6e9ffffffff71d1abe4352100d4d837ca96c1a16947b5444f0f3e0bc645c430f704bb06c84c0100000000ffffffff01905f0100000000001976a9143ec6c3ed8dfc3ceabcc1cbdb0c5aef4e2d02873c88ac00000000' + > tx3 = sign(tx2,1,priv) + > tx3 + 
'01000000025e46552eb977f908fa8b3ee9d2943a8fa6d96c3b768a5f250ce485acd8c7f797000000008b483045022100dd29d89a28451febb990fb1dafa21245b105140083ced315ebcdea187572b3990220713f2e554f384d29d7abfedf39f0eb92afba0ef46f374e49d43a728a0ff6046e01410420f34c2786b4bae593e22596631b025f3ff46e200fc1d4b52ef49bbdc2ed00b26c584b7e32523fb01be2294a1f8a5eb0cf71a203cc034ced46ea92a8df16c6e9ffffffff71d1abe4352100d4d837ca96c1a16947b5444f0f3e0bc645c430f704bb06c84c010000008c4930460221008bbaaaf172adfefc3a1315dc7312c88645832ff76d52e0029d127e65bbeeabe1022100fdeb89658d503cf2737cedb4049e5070f689c50a9b6c85997d49e0787938f93901410420f34c2786b4bae593e22596631b025f3ff46e200fc1d4b52ef49bbdc2ed00b26c584b7e32523fb01be2294a1f8a5eb0cf71a203cc034ced46ea92a8df16c6e9ffffffff01905f0100000000001976a9143ec6c3ed8dfc3ceabcc1cbdb0c5aef4e2d02873c88ac00000000' + > pushtx(tx3) + 'Transaction Submitted' + +Or using the pybtctool command line interface: + + @vub: pybtctool random_electrum_seed + 484ccb566edb66c65dd0fd2e4d90ef65 + + @vub: pybtctool electrum_privkey 484ccb566edb66c65dd0fd2e4d90ef65 0 0 + 593240c2205e7b7b5d7c13393b7c9553497854b75c7470b76aeca50cd4a894d7 + + @vub: pybtctool electrum_mpk 484ccb566edb66c65dd0fd2e4d90ef65 + 484e42865b8e9a6ea8262fd1cde666b557393258ed598d842e563ad9e5e6c70a97e387eefdef123c1b8b4eb21fe210c6216ad7cc1e4186fbbba70f0e2c062c25 + + @vub: pybtctool bip32_master_key 21456t243rhgtucyadh3wgyrcubw3grydfbng + xprv9s21ZrQH143K2napkeoHT48gWmoJa89KCQj4nqLfdGybyWHP9Z8jvCGzuEDv4ihCyoed7RFPNbc9NxoSF7cAvH9AaNSvepUaeqbSpJZ4rbT + + @vub: pybtctool bip32_ckd xprv9s21ZrQH143K2napkeoHT48gWmoJa89KCQj4nqLfdGybyWHP9Z8jvCGzuEDv4ihCyoed7RFPNbc9NxoSF7cAvH9AaNSvepUaeqbSpJZ4rbT 0 + xprv9vfzYrpwo7QHFdtrcvsSCTrBESFPUf1g7NRvayy1QkEfUekpDKLfqvHjgypF5w3nAvnwPjtQUNkyywWNkLbiUS95khfHCzJXFkLEdwRepbw + + @vub: pybtctool bip32_privtopub xprv9s21ZrQH143K2napkeoHT48gWmoJa89KCQj4nqLfdGybyWHP9Z8jvCGzuEDv4ihCyoed7RFPNbc9NxoSF7cAvH9AaNSvepUaeqbSpJZ4rbT + 
xpub661MyMwAqRbcFGfHrgLHpC5R4odnyasAZdefbDkHBcWarJcXh6SzTzbUkWuhnP142ZFdKdAJSuTSaiGDYjvm7bCLmA8DZqksYjJbYmcgrYF + +The -s option lets you read arguments from the command line + + @vub: pybtctool sha256 'some big long brainwallet password' | pybtctool -s privtoaddr | pybtctool -s history + [{'output': u'97f7c7d8ac85e40c255f8a763b6cd9a68f3a94d2e93e8bfa08f977b92e55465e:0', 'value': 50000, 'address': u'1CQLd3bhw4EzaURHbKCwM5YZbUQfA4ReY6'}, {'output': u'4cc806bb04f730c445c60b3e0f4f44b54769a1c196ca37d8d4002135e4abd171:1', 'value': 50000, 'address': u'1CQLd3bhw4EzaURHbKCwM5YZbUQfA4ReY6'}] + @vub: pybtctool random_electrum_seed | pybtctool -s electrum_privkey 0 0 + 593240c2205e7b7b5d7c13393b7c9553497854b75c7470b76aeca50cd4a894d7 + +The -b option lets you read binary data as an argument + + @vub: pybtctool sha256 123 | pybtctool -s changebase 16 256 | pybtctool -b changebase 256 16 + a665a45920422f9d417e4867efdc4fb8a04a1f3fff1fa07e998e86f7f7a27ae30a + +The -j option lets you read json from the command line (-J to split a json list into multiple arguments) + + @vub: pybtctool unspent 1FxkfJQLJTXpW6QmxGT6oF43ZH959ns8Cq | pybtctool -j select 200000001 | pybtctool -j mksend 1EXoDusjGwvnjZUyKkxZ4UHEf77z6A5S4P:20000 1FxkfJQLJTXpW6QmxGT6oF43ZH959ns8Cq 1000 | pybtctool -s signall 805cd74ca322633372b9bfb857f3be41db0b8de43a3c44353b238c0acff9d523 + 
0100000003d5001aae8358ae98cb02c1b6f9859dc1ac3dbc1e9cc88632afeb7b7e3c510a49000000008b4830450221009e03bb6122437767e2ca785535824f4ed13d2ebbb9fa4f9becc6d6f4e1e217dc022064577353c08d8d974250143d920d3b963b463e43bbb90f3371060645c49266b90141048ef80f6bd6b073407a69299c2ba89de48adb59bb9689a5ab040befbbebcfbb15d01b006a6b825121a0d2c546c277acb60f0bd3203bd501b8d67c7dba91f27f47ffffffff1529d655dff6a0f6c9815ee835312fb3ca4df622fde21b6b9097666e9284087d010000008a473044022035dd67d18b575ebd339d05ca6ffa1d27d7549bd993aeaf430985795459fc139402201aaa162cc50181cee493870c9479b1148243a33923cb77be44a73ca554a4e5d60141048ef80f6bd6b073407a69299c2ba89de48adb59bb9689a5ab040befbbebcfbb15d01b006a6b825121a0d2c546c277acb60f0bd3203bd501b8d67c7dba91f27f47ffffffff23d5f9cf0a8c233b35443c3ae48d0bdb41bef357b8bfb972336322a34cd75c80010000008b483045022014daa5c5bbe9b3e5f2539a5cd8e22ce55bc84788f946c5b3643ecac85b4591a9022100a4062074a1df3fa0aea5ef67368d0b1f0eaac520bee6e417c682d83cd04330450141048ef80f6bd6b073407a69299c2ba89de48adb59bb9689a5ab040befbbebcfbb15d01b006a6b825121a0d2c546c277acb60f0bd3203bd501b8d67c7dba91f27f47ffffffff02204e0000000000001976a914946cb2e08075bcbaf157e47bcb67eb2b2339d24288ac5b3c4411000000001976a914a41d15ae657ad3bfd0846771a34d7584c37d54a288ac00000000 + +Fun stuff with json: + + @vub: pybtctool history 1EXoDusjGwvnjZUyKkxZ4UHEf77z6A5S4P | pybtctool -j multiaccess value | pybtctool -j sum + 625216206372 + + @vub: pybtctool history 1EXoDusjGwvnjZUyKkxZ4UHEf77z6A5S4P | pybtctool -j count + 6198 + +### Listing of main commands: + +* privkey_to_pubkey : (privkey) -> pubkey +* privtopub : (privkey) -> pubkey +* pubkey_to_address : (pubkey) -> address +* pubtoaddr : (pubkey) -> address +* privkey_to_address : (privkey) -> address +* privtoaddr : (privkey) -> address + +* add : (key1, key2) -> key1 + key2 (works on privkeys or pubkeys) +* multiply : (pubkey, privkey) -> returns pubkey * privkey + +* ecdsa_sign : (message, privkey) -> sig +* ecdsa_verify : (message, sig, pubkey) -> True/False +* ecdsa_recover 
: (message, sig) -> pubkey + +* random_key : () -> privkey +* random_electrum_seed : () -> electrum seed + +* electrum_stretch : (seed) -> secret exponent +* electrum_privkey : (seed or secret exponent, i, type) -> privkey +* electrum_mpk : (seed or secret exponent) -> master public key +* electrum_pubkey : (seed or secexp or mpk) -> pubkey + +* bip32_master_key : (seed) -> bip32 master key +* bip32_ckd : (private or public bip32 key, i) -> child key +* bip32_privtopub : (private bip32 key) -> public bip32 key +* bip32_extract_key : (private or public bip32_key) -> privkey or pubkey + +* deserialize : (hex or bin transaction) -> JSON tx +* serialize : (JSON tx) -> hex or bin tx +* mktx : (inputs, outputs) -> tx +* mksend : (inputs, outputs, change_addr, fee) -> tx +* sign : (tx, i, privkey) -> tx with index i signed with privkey +* multisign : (tx, i, script, privkey) -> signature +* apply_multisignatures: (tx, i, script, sigs) -> tx with index i signed with sigs +* scriptaddr : (script) -> P2SH address +* mk_multisig_script : (pubkeys, k, n) -> k-of-n multisig script from pubkeys +* verify_tx_input : (tx, i, script, sig, pub) -> True/False +* tx_hash : (hex or bin tx) -> hash + +* history : (address1, address2, etc) -> outputs to those addresses +* unspent : (address1, address2, etc) -> unspent outputs to those addresses +* fetchtx : (txash) -> tx if present +* pushtx : (hex or bin tx) -> tries to push to blockchain.info/pushtx + +* access : (json list/object, prop) -> desired property of that json object +* multiaccess : (json list, prop) -> like access, but mapped across each list element +* slice : (json list, start, end) -> given slice of the list +* count : (json list) -> number of elements +* sum : (json list) -> sum of all values diff --git a/src/lib/pybitcointools/__init__.py b/src/lib/pybitcointools/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/lib/pybitcointools/bitcoin/__init__.py b/src/lib/pybitcointools/bitcoin/__init__.py 
new file mode 100644 index 00000000..8b543fee --- /dev/null +++ b/src/lib/pybitcointools/bitcoin/__init__.py @@ -0,0 +1,9 @@ +from .py2specials import * +from .py3specials import * +from .main import * +from .transaction import * +from .deterministic import * +from .bci import * +from .composite import * +from .stealth import * +from .blocks import * diff --git a/src/lib/pybitcointools/bitcoin/bci.py b/src/lib/pybitcointools/bitcoin/bci.py new file mode 100644 index 00000000..2ff11d93 --- /dev/null +++ b/src/lib/pybitcointools/bitcoin/bci.py @@ -0,0 +1,384 @@ +#!/usr/bin/python +import json, re +import random +import sys +try: + from urllib.request import build_opener +except: + from urllib2 import build_opener + + +# Makes a request to a given URL (first arg) and optional params (second arg) +def make_request(*args): + opener = build_opener() + opener.addheaders = [('User-agent', + 'Mozilla/5.0'+str(random.randrange(1000000)))] + try: + return opener.open(*args).read().strip() + except Exception as e: + try: + p = e.read().strip() + except: + p = e + raise Exception(p) + + +def parse_addr_args(*args): + # Valid input formats: blockr_unspent([addr1, addr2,addr3]) + # blockr_unspent(addr1, addr2, addr3) + # blockr_unspent([addr1, addr2, addr3], network) + # blockr_unspent(addr1, addr2, addr3, network) + # Where network is 'btc' or 'testnet' + network = 'btc' + addr_args = args + if len(args) >= 1 and args[-1] in ('testnet', 'btc'): + network = args[-1] + addr_args = args[:-1] + if len(addr_args) == 1 and isinstance(addr_args, list): + addr_args = addr_args[0] + + return network, addr_args + + +# Gets the unspent outputs of one or more addresses +def bci_unspent(*args): + network, addrs = parse_addr_args(*args) + u = [] + for a in addrs: + try: + data = make_request('https://blockchain.info/unspent?address='+a) + except Exception as e: + if str(e) == 'No free outputs to spend': + continue + else: + raise Exception(e) + try: + jsonobj = json.loads(data) + for o in 
jsonobj["unspent_outputs"]: + h = o['tx_hash'].decode('hex')[::-1].encode('hex') + u.append({ + "output": h+':'+str(o['tx_output_n']), + "value": o['value'] + }) + except: + raise Exception("Failed to decode data: "+data) + return u + + +def blockr_unspent(*args): + # Valid input formats: blockr_unspent([addr1, addr2,addr3]) + # blockr_unspent(addr1, addr2, addr3) + # blockr_unspent([addr1, addr2, addr3], network) + # blockr_unspent(addr1, addr2, addr3, network) + # Where network is 'btc' or 'testnet' + network, addr_args = parse_addr_args(*args) + + if network == 'testnet': + blockr_url = 'https://tbtc.blockr.io/api/v1/address/unspent/' + elif network == 'btc': + blockr_url = 'https://btc.blockr.io/api/v1/address/unspent/' + else: + raise Exception( + 'Unsupported network {0} for blockr_unspent'.format(network)) + + if len(addr_args) == 0: + return [] + elif isinstance(addr_args[0], list): + addrs = addr_args[0] + else: + addrs = addr_args + res = make_request(blockr_url+','.join(addrs)) + data = json.loads(res)['data'] + o = [] + if 'unspent' in data: + data = [data] + for dat in data: + for u in dat['unspent']: + o.append({ + "output": u['tx']+':'+str(u['n']), + "value": int(u['amount'].replace('.', '')) + }) + return o + + +def helloblock_unspent(*args): + network, addrs = parse_addr_args(*args) + if network == 'testnet': + url = 'https://testnet.helloblock.io/v1/addresses/%s/unspents?limit=500&offset=%s' + elif network == 'btc': + url = 'https://mainnet.helloblock.io/v1/addresses/%s/unspents?limit=500&offset=%s' + o = [] + for addr in addrs: + for offset in xrange(0, 10**9, 500): + res = make_request(url % (addr, offset)) + data = json.loads(res)["data"] + if not len(data["unspents"]): + break + elif offset: + sys.stderr.write("Getting more unspents: %d\n" % offset) + for dat in data["unspents"]: + o.append({ + "output": dat["txHash"]+':'+str(dat["index"]), + "value": dat["value"], + }) + return o + + +unspent_getters = { + 'bci': bci_unspent, + 'blockr': 
blockr_unspent, + 'helloblock': helloblock_unspent +} + + +def unspent(*args, **kwargs): + f = unspent_getters.get(kwargs.get('source', ''), bci_unspent) + return f(*args) + + +# Gets the transaction output history of a given set of addresses, +# including whether or not they have been spent +def history(*args): + # Valid input formats: history([addr1, addr2,addr3]) + # history(addr1, addr2, addr3) + if len(args) == 0: + return [] + elif isinstance(args[0], list): + addrs = args[0] + else: + addrs = args + + txs = [] + for addr in addrs: + offset = 0 + while 1: + data = make_request( + 'https://blockchain.info/address/%s?format=json&offset=%s' % + (addr, offset)) + try: + jsonobj = json.loads(data) + except: + raise Exception("Failed to decode data: "+data) + txs.extend(jsonobj["txs"]) + if len(jsonobj["txs"]) < 50: + break + offset += 50 + sys.stderr.write("Fetching more transactions... "+str(offset)+'\n') + outs = {} + for tx in txs: + for o in tx["out"]: + if o['addr'] in addrs: + key = str(tx["tx_index"])+':'+str(o["n"]) + outs[key] = { + "address": o["addr"], + "value": o["value"], + "output": tx["hash"]+':'+str(o["n"]), + "block_height": tx.get("block_height", None) + } + for tx in txs: + for i, inp in enumerate(tx["inputs"]): + if inp["prev_out"]["addr"] in addrs: + key = str(inp["prev_out"]["tx_index"]) + \ + ':'+str(inp["prev_out"]["n"]) + if outs.get(key): + outs[key]["spend"] = tx["hash"]+':'+str(i) + return [outs[k] for k in outs] + + +# Pushes a transaction to the network using https://blockchain.info/pushtx +def bci_pushtx(tx): + if not re.match('^[0-9a-fA-F]*$', tx): + tx = tx.encode('hex') + return make_request('https://blockchain.info/pushtx', 'tx='+tx) + + +def eligius_pushtx(tx): + if not re.match('^[0-9a-fA-F]*$', tx): + tx = tx.encode('hex') + s = make_request( + 'http://eligius.st/~wizkid057/newstats/pushtxn.php', + 'transaction='+tx+'&send=Push') + strings = re.findall('string[^"]*"[^"]*"', s) + for string in strings: + quote = 
re.findall('"[^"]*"', string)[0] + if len(quote) >= 5: + return quote[1:-1] + + +def blockr_pushtx(tx, network='btc'): + if network == 'testnet': + blockr_url = 'https://tbtc.blockr.io/api/v1/tx/push' + elif network == 'btc': + blockr_url = 'https://btc.blockr.io/api/v1/tx/push' + else: + raise Exception( + 'Unsupported network {0} for blockr_pushtx'.format(network)) + + if not re.match('^[0-9a-fA-F]*$', tx): + tx = tx.encode('hex') + return make_request(blockr_url, '{"hex":"%s"}' % tx) + + +def helloblock_pushtx(tx): + if not re.match('^[0-9a-fA-F]*$', tx): + tx = tx.encode('hex') + return make_request('https://mainnet.helloblock.io/v1/transactions', + 'rawTxHex='+tx) + +pushtx_getters = { + 'bci': bci_pushtx, + 'blockr': blockr_pushtx, + 'helloblock': helloblock_pushtx +} + + +def pushtx(*args, **kwargs): + f = pushtx_getters.get(kwargs.get('source', ''), bci_pushtx) + return f(*args) + + +def last_block_height(): + data = make_request('https://blockchain.info/latestblock') + jsonobj = json.loads(data) + return jsonobj["height"] + + +# Gets a specific transaction +def bci_fetchtx(txhash): + if not re.match('^[0-9a-fA-F]*$', txhash): + txhash = txhash.encode('hex') + data = make_request('https://blockchain.info/rawtx/'+txhash+'?format=hex') + return data + + +def blockr_fetchtx(txhash, network='btc'): + if network == 'testnet': + blockr_url = 'https://tbtc.blockr.io/api/v1/tx/raw/' + elif network == 'btc': + blockr_url = 'https://btc.blockr.io/api/v1/tx/raw/' + else: + raise Exception( + 'Unsupported network {0} for blockr_fetchtx'.format(network)) + if not re.match('^[0-9a-fA-F]*$', txhash): + txhash = txhash.encode('hex') + jsondata = json.loads(make_request(blockr_url+txhash)) + return jsondata['data']['tx']['hex'] + + +def helloblock_fetchtx(txhash, network='btc'): + if not re.match('^[0-9a-fA-F]*$', txhash): + txhash = txhash.encode('hex') + if network == 'testnet': + url = 'https://testnet.helloblock.io/v1/transactions/' + elif network == 'btc': + url = 
'https://mainnet.helloblock.io/v1/transactions/' + else: + raise Exception( + 'Unsupported network {0} for helloblock_fetchtx'.format(network)) + data = json.loads(make_request(url + txhash))["data"]["transaction"] + o = { + "locktime": data["locktime"], + "version": data["version"], + "ins": [], + "outs": [] + } + for inp in data["inputs"]: + o["ins"].append({ + "script": inp["scriptSig"], + "outpoint": { + "index": inp["prevTxoutIndex"], + "hash": inp["prevTxHash"], + }, + "sequence": 4294967295 + }) + for outp in data["outputs"]: + o["outs"].append({ + "value": outp["value"], + "script": outp["scriptPubKey"] + }) + from bitcoin.transaction import serialize + from bitcoin.transaction import txhash as TXHASH + tx = serialize(o) + assert TXHASH(tx) == txhash + return tx + + +fetchtx_getters = { + 'bci': bci_fetchtx, + 'blockr': blockr_fetchtx, + 'helloblock': helloblock_fetchtx +} + + +def fetchtx(*args, **kwargs): + f = fetchtx_getters.get(kwargs.get('source', ''), bci_fetchtx) + return f(*args) + + +def firstbits(address): + if len(address) >= 25: + return make_request('https://blockchain.info/q/getfirstbits/'+address) + else: + return make_request( + 'https://blockchain.info/q/resolvefirstbits/'+address) + + +def get_block_at_height(height): + j = json.loads(make_request("https://blockchain.info/block-height/" + + str(height)+"?format=json")) + for b in j['blocks']: + if b['main_chain'] is True: + return b + raise Exception("Block at this height not found") + + +def _get_block(inp): + if len(str(inp)) < 64: + return get_block_at_height(inp) + else: + return json.loads(make_request( + 'https://blockchain.info/rawblock/'+inp)) + + +def get_block_header_data(inp): + j = _get_block(inp) + return { + 'version': j['ver'], + 'hash': j['hash'], + 'prevhash': j['prev_block'], + 'timestamp': j['time'], + 'merkle_root': j['mrkl_root'], + 'bits': j['bits'], + 'nonce': j['nonce'], + } + +def blockr_get_block_header_data(height, network='btc'): + if network == 'testnet': + 
blockr_url = "https://tbtc.blockr.io/api/v1/block/raw/" + elif network == 'btc': + blockr_url = "https://btc.blockr.io/api/v1/block/raw/" + else: + raise Exception( + 'Unsupported network {0} for blockr_get_block_header_data'.format(network)) + + k = json.loads(make_request(blockr_url + str(height))) + j = k['data'] + return { + 'version': j['version'], + 'hash': j['hash'], + 'prevhash': j['previousblockhash'], + 'timestamp': j['time'], + 'merkle_root': j['merkleroot'], + 'bits': int(j['bits'], 16), + 'nonce': j['nonce'], + } + +def get_txs_in_block(inp): + j = _get_block(inp) + hashes = [t['hash'] for t in j['tx']] + return hashes + + +def get_block_height(txhash): + j = json.loads(make_request('https://blockchain.info/rawtx/'+txhash)) + return j['block_height'] diff --git a/src/lib/pybitcointools/bitcoin/blocks.py b/src/lib/pybitcointools/bitcoin/blocks.py new file mode 100644 index 00000000..9df6b35c --- /dev/null +++ b/src/lib/pybitcointools/bitcoin/blocks.py @@ -0,0 +1,50 @@ +from .main import * + + +def serialize_header(inp): + o = encode(inp['version'], 256, 4)[::-1] + \ + inp['prevhash'].decode('hex')[::-1] + \ + inp['merkle_root'].decode('hex')[::-1] + \ + encode(inp['timestamp'], 256, 4)[::-1] + \ + encode(inp['bits'], 256, 4)[::-1] + \ + encode(inp['nonce'], 256, 4)[::-1] + h = bin_sha256(bin_sha256(o))[::-1].encode('hex') + assert h == inp['hash'], (sha256(o), inp['hash']) + return o.encode('hex') + + +def deserialize_header(inp): + inp = inp.decode('hex') + return { + "version": decode(inp[:4][::-1], 256), + "prevhash": inp[4:36][::-1].encode('hex'), + "merkle_root": inp[36:68][::-1].encode('hex'), + "timestamp": decode(inp[68:72][::-1], 256), + "bits": decode(inp[72:76][::-1], 256), + "nonce": decode(inp[76:80][::-1], 256), + "hash": bin_sha256(bin_sha256(inp))[::-1].encode('hex') + } + + +def mk_merkle_proof(header, hashes, index): + nodes = [h.decode('hex')[::-1] for h in hashes] + if len(nodes) % 2 and len(nodes) > 2: + nodes.append(nodes[-1]) + 
layers = [nodes] + while len(nodes) > 1: + newnodes = [] + for i in range(0, len(nodes) - 1, 2): + newnodes.append(bin_sha256(bin_sha256(nodes[i] + nodes[i+1]))) + if len(newnodes) % 2 and len(newnodes) > 2: + newnodes.append(newnodes[-1]) + nodes = newnodes + layers.append(nodes) + # Sanity check, make sure merkle root is valid + assert nodes[0][::-1].encode('hex') == header['merkle_root'] + merkle_siblings = \ + [layers[i][(index >> i) ^ 1] for i in range(len(layers)-1)] + return { + "hash": hashes[index], + "siblings": [x[::-1].encode('hex') for x in merkle_siblings], + "header": header + } diff --git a/src/lib/pybitcointools/bitcoin/composite.py b/src/lib/pybitcointools/bitcoin/composite.py new file mode 100644 index 00000000..0df0e079 --- /dev/null +++ b/src/lib/pybitcointools/bitcoin/composite.py @@ -0,0 +1,128 @@ +from .main import * +from .transaction import * +from .bci import * +from .deterministic import * +from .blocks import * + + +# Takes privkey, address, value (satoshis), fee (satoshis) +def send(frm, to, value, fee=10000): + return sendmultitx(frm, to + ":" + str(value), fee) + + +# Takes privkey, "address1:value1,address2:value2" (satoshis), fee (satoshis) +def sendmultitx(frm, tovalues, fee=10000, **kwargs): + tv, fee = args[:-1], int(args[-1]) + outs = [] + outvalue = 0 + for a in tv: + outs.append(a) + outvalue += int(a.split(":")[1]) + + u = unspent(privtoaddr(frm), **kwargs) + u2 = select(u, int(outvalue)+int(fee)) + argz = u2 + outs + [frm, fee] + tx = mksend(*argz) + tx2 = signall(tx, frm) + return pushtx(tx2, **kwargs) + + +# Takes address, address, value (satoshis), fee(satoshis) +def preparetx(frm, to, value, fee=10000, **kwargs): + tovalues = to + ":" + str(value) + return preparemultitx(frm, tovalues, fee, **kwargs) + + +# Takes address, address:value, address:value ... 
(satoshis), fee(satoshis) +def preparemultitx(frm, *args, **kwargs): + tv, fee = args[:-1], int(args[-1]) + outs = [] + outvalue = 0 + for a in tv: + outs.append(a) + outvalue += int(a.split(":")[1]) + + u = unspent(frm, **kwargs) + u2 = select(u, int(outvalue)+int(fee)) + argz = u2 + outs + [frm, fee] + return mksend(*argz) + + +# BIP32 hierarchical deterministic multisig script +def bip32_hdm_script(*args): + if len(args) == 3: + keys, req, path = args + else: + i, keys, path = 0, [], [] + while len(args[i]) > 40: + keys.append(args[i]) + i += 1 + req = int(args[i]) + path = map(int, args[i+1:]) + pubs = sorted(map(lambda x: bip32_descend(x, path), keys)) + return mk_multisig_script(pubs, req) + + +# BIP32 hierarchical deterministic multisig address +def bip32_hdm_addr(*args): + return scriptaddr(bip32_hdm_script(*args)) + + +# Setup a coinvault transaction +def setup_coinvault_tx(tx, script): + txobj = deserialize(tx) + N = deserialize_script(script)[-2] + for inp in txobj["ins"]: + inp["script"] = serialize_script([None] * (N+1) + [script]) + return serialize(txobj) + + +# Sign a coinvault transaction +def sign_coinvault_tx(tx, priv): + pub = privtopub(priv) + txobj = deserialize(tx) + subscript = deserialize_script(txobj['ins'][0]['script']) + oscript = deserialize_script(subscript[-1]) + k, pubs = oscript[0], oscript[1:-2] + for j in range(len(txobj['ins'])): + scr = deserialize_script(txobj['ins'][j]['script']) + for i, p in enumerate(pubs): + if p == pub: + scr[i+1] = multisign(tx, j, subscript[-1], priv) + if len(filter(lambda x: x, scr[1:-1])) >= k: + scr = [None] + filter(lambda x: x, scr[1:-1])[:k] + [scr[-1]] + txobj['ins'][j]['script'] = serialize_script(scr) + return serialize(txobj) + + +# Inspects a transaction +def inspect(tx, **kwargs): + d = deserialize(tx) + isum = 0 + ins = {} + for _in in d['ins']: + h = _in['outpoint']['hash'] + i = _in['outpoint']['index'] + prevout = deserialize(fetchtx(h, **kwargs))['outs'][i] + isum += prevout['value'] + 
a = script_to_address(prevout['script']) + ins[a] = ins.get(a, 0) + prevout['value'] + outs = [] + osum = 0 + for _out in d['outs']: + outs.append({'address': script_to_address(_out['script']), + 'value': _out['value']}) + osum += _out['value'] + return { + 'fee': isum - osum, + 'outs': outs, + 'ins': ins + } + + +def merkle_prove(txhash): + blocknum = str(get_block_height(txhash)) + header = get_block_header_data(blocknum) + hashes = get_txs_in_block(blocknum) + i = hashes.index(txhash) + return mk_merkle_proof(header, hashes, i) diff --git a/src/lib/pybitcointools/bitcoin/deterministic.py b/src/lib/pybitcointools/bitcoin/deterministic.py new file mode 100644 index 00000000..b2bdbbc6 --- /dev/null +++ b/src/lib/pybitcointools/bitcoin/deterministic.py @@ -0,0 +1,199 @@ +from .main import * +import hmac +import hashlib +from binascii import hexlify +# Electrum wallets + + +def electrum_stretch(seed): + return slowsha(seed) + +# Accepts seed or stretched seed, returns master public key + + +def electrum_mpk(seed): + if len(seed) == 32: + seed = electrum_stretch(seed) + return privkey_to_pubkey(seed)[2:] + +# Accepts (seed or stretched seed), index and secondary index +# (conventionally 0 for ordinary addresses, 1 for change) , returns privkey + + +def electrum_privkey(seed, n, for_change=0): + if len(seed) == 32: + seed = electrum_stretch(seed) + mpk = electrum_mpk(seed) + offset = dbl_sha256(from_int_representation_to_bytes(n)+b':'+from_int_representation_to_bytes(for_change)+b':'+binascii.unhexlify(mpk)) + return add_privkeys(seed, offset) + +# Accepts (seed or stretched seed or master pubkey), index and secondary index +# (conventionally 0 for ordinary addresses, 1 for change) , returns pubkey + + +def electrum_pubkey(masterkey, n, for_change=0): + if len(masterkey) == 32: + mpk = electrum_mpk(electrum_stretch(masterkey)) + elif len(masterkey) == 64: + mpk = electrum_mpk(masterkey) + else: + mpk = masterkey + bin_mpk = encode_pubkey(mpk, 'bin_electrum') + offset = 
bin_dbl_sha256(from_int_representation_to_bytes(n)+b':'+from_int_representation_to_bytes(for_change)+b':'+bin_mpk) + return add_pubkeys('04'+mpk, privtopub(offset)) + +# seed/stretched seed/pubkey -> address (convenience method) + + +def electrum_address(masterkey, n, for_change=0, version=0): + return pubkey_to_address(electrum_pubkey(masterkey, n, for_change), version) + +# Given a master public key, a private key from that wallet and its index, +# cracks the secret exponent which can be used to generate all other private +# keys in the wallet + + +def crack_electrum_wallet(mpk, pk, n, for_change=0): + bin_mpk = encode_pubkey(mpk, 'bin_electrum') + offset = dbl_sha256(str(n)+':'+str(for_change)+':'+bin_mpk) + return subtract_privkeys(pk, offset) + +# Below code ASSUMES binary inputs and compressed pubkeys +MAINNET_PRIVATE = b'\x04\x88\xAD\xE4' +MAINNET_PUBLIC = b'\x04\x88\xB2\x1E' +TESTNET_PRIVATE = b'\x04\x35\x83\x94' +TESTNET_PUBLIC = b'\x04\x35\x87\xCF' +PRIVATE = [MAINNET_PRIVATE, TESTNET_PRIVATE] +PUBLIC = [MAINNET_PUBLIC, TESTNET_PUBLIC] + +# BIP32 child key derivation + + +def raw_bip32_ckd(rawtuple, i): + vbytes, depth, fingerprint, oldi, chaincode, key = rawtuple + i = int(i) + + if vbytes in PRIVATE: + priv = key + pub = privtopub(key) + else: + pub = key + + if i >= 2**31: + if vbytes in PUBLIC: + raise Exception("Can't do private derivation on public key!") + I = hmac.new(chaincode, b'\x00'+priv[:32]+encode(i, 256, 4), hashlib.sha512).digest() + else: + I = hmac.new(chaincode, pub+encode(i, 256, 4), hashlib.sha512).digest() + + if vbytes in PRIVATE: + newkey = add_privkeys(I[:32]+B'\x01', priv) + fingerprint = bin_hash160(privtopub(key))[:4] + if vbytes in PUBLIC: + newkey = add_pubkeys(compress(privtopub(I[:32])), key) + fingerprint = bin_hash160(key)[:4] + + return (vbytes, depth + 1, fingerprint, i, I[32:], newkey) + + +def bip32_serialize(rawtuple): + vbytes, depth, fingerprint, i, chaincode, key = rawtuple + i = encode(i, 256, 4) + chaincode = 
encode(hash_to_int(chaincode), 256, 32) + keydata = b'\x00'+key[:-1] if vbytes in PRIVATE else key + bindata = vbytes + from_int_to_byte(depth % 256) + fingerprint + i + chaincode + keydata + return changebase(bindata+bin_dbl_sha256(bindata)[:4], 256, 58) + + +def bip32_deserialize(data): + dbin = changebase(data, 58, 256) + if bin_dbl_sha256(dbin[:-4])[:4] != dbin[-4:]: + raise Exception("Invalid checksum") + vbytes = dbin[0:4] + depth = from_byte_to_int(dbin[4]) + fingerprint = dbin[5:9] + i = decode(dbin[9:13], 256) + chaincode = dbin[13:45] + key = dbin[46:78]+b'\x01' if vbytes in PRIVATE else dbin[45:78] + return (vbytes, depth, fingerprint, i, chaincode, key) + + +def raw_bip32_privtopub(rawtuple): + vbytes, depth, fingerprint, i, chaincode, key = rawtuple + newvbytes = MAINNET_PUBLIC if vbytes == MAINNET_PRIVATE else TESTNET_PUBLIC + return (newvbytes, depth, fingerprint, i, chaincode, privtopub(key)) + + +def bip32_privtopub(data): + return bip32_serialize(raw_bip32_privtopub(bip32_deserialize(data))) + + +def bip32_ckd(data, i): + return bip32_serialize(raw_bip32_ckd(bip32_deserialize(data), i)) + + +def bip32_master_key(seed, vbytes=MAINNET_PRIVATE): + I = hmac.new(from_string_to_bytes("Bitcoin seed"), seed, hashlib.sha512).digest() + return bip32_serialize((vbytes, 0, b'\x00'*4, 0, I[32:], I[:32]+b'\x01')) + + +def bip32_bin_extract_key(data): + return bip32_deserialize(data)[-1] + + +def bip32_extract_key(data): + return safe_hexlify(bip32_deserialize(data)[-1]) + +# Exploits the same vulnerability as above in Electrum wallets +# Takes a BIP32 pubkey and one of the child privkeys of its corresponding +# privkey and returns the BIP32 privkey associated with that pubkey + + +def raw_crack_bip32_privkey(parent_pub, priv): + vbytes, depth, fingerprint, i, chaincode, key = priv + pvbytes, pdepth, pfingerprint, pi, pchaincode, pkey = parent_pub + i = int(i) + + if i >= 2**31: + raise Exception("Can't crack private derivation!") + + I = hmac.new(pchaincode, 
pkey+encode(i, 256, 4), hashlib.sha512).digest() + + pprivkey = subtract_privkeys(key, I[:32]+b'\x01') + + newvbytes = MAINNET_PRIVATE if vbytes == MAINNET_PUBLIC else TESTNET_PRIVATE + return (newvbytes, pdepth, pfingerprint, pi, pchaincode, pprivkey) + + +def crack_bip32_privkey(parent_pub, priv): + dsppub = bip32_deserialize(parent_pub) + dspriv = bip32_deserialize(priv) + return bip32_serialize(raw_crack_bip32_privkey(dsppub, dspriv)) + + +def coinvault_pub_to_bip32(*args): + if len(args) == 1: + args = args[0].split(' ') + vals = map(int, args[34:]) + I1 = ''.join(map(chr, vals[:33])) + I2 = ''.join(map(chr, vals[35:67])) + return bip32_serialize((MAINNET_PUBLIC, 0, b'\x00'*4, 0, I2, I1)) + + +def coinvault_priv_to_bip32(*args): + if len(args) == 1: + args = args[0].split(' ') + vals = map(int, args[34:]) + I2 = ''.join(map(chr, vals[35:67])) + I3 = ''.join(map(chr, vals[72:104])) + return bip32_serialize((MAINNET_PRIVATE, 0, b'\x00'*4, 0, I2, I3+b'\x01')) + + +def bip32_descend(*args): + if len(args) == 2 and isinstance(args[1], list): + key, path = args + else: + key, path = args[0], map(int, args[1:]) + for p in path: + key = bip32_ckd(key, p) + return bip32_extract_key(key) diff --git a/src/lib/pybitcointools/bitcoin/main.py b/src/lib/pybitcointools/bitcoin/main.py new file mode 100644 index 00000000..2b8bdd04 --- /dev/null +++ b/src/lib/pybitcointools/bitcoin/main.py @@ -0,0 +1,532 @@ +#!/usr/bin/python +from .py2specials import * +from .py3specials import * +import binascii +import hashlib +import re +import sys +import os +import base64 +import time +import random +import hmac +from .ripemd import * + +# Elliptic curve parameters (secp256k1) + +P = 2**256 - 2**32 - 977 +N = 115792089237316195423570985008687907852837564279074904382605163141518161494337 +A = 0 +B = 7 +Gx = 55066263022277343669578718895168534326250603453777594175500187360389116729240 +Gy = 32670510020758816978083085130507043184471273380659243275938904335757337482424 +G = (Gx, Gy) + + +def 
change_curve(p, n, a, b, gx, gy): + global P, N, A, B, Gx, Gy, G + P, N, A, B, Gx, Gy = p, n, a, b, gx, gy + G = (Gx, Gy) + + +def getG(): + return G + +# Extended Euclidean Algorithm + + +def inv(a, n): + if a == 0: + return 0 + lm, hm = 1, 0 + low, high = a % n, n + while low > 1: + r = high//low + nm, new = hm-lm*r, high-low*r + lm, low, hm, high = nm, new, lm, low + return lm % n + + + +# JSON access (for pybtctool convenience) + + +def access(obj, prop): + if isinstance(obj, dict): + if prop in obj: + return obj[prop] + elif '.' in prop: + return obj[float(prop)] + else: + return obj[int(prop)] + else: + return obj[int(prop)] + + +def multiaccess(obj, prop): + return [access(o, prop) for o in obj] + + +def slice(obj, start=0, end=2**200): + return obj[int(start):int(end)] + + +def count(obj): + return len(obj) + +_sum = sum + + +def sum(obj): + return _sum(obj) + + +def isinf(p): + return p[0] == 0 and p[1] == 0 + + +def to_jacobian(p): + o = (p[0], p[1], 1) + return o + + +def jacobian_double(p): + if not p[1]: + return (0, 0, 0) + ysq = (p[1] ** 2) % P + S = (4 * p[0] * ysq) % P + M = (3 * p[0] ** 2 + A * p[2] ** 4) % P + nx = (M**2 - 2 * S) % P + ny = (M * (S - nx) - 8 * ysq ** 2) % P + nz = (2 * p[1] * p[2]) % P + return (nx, ny, nz) + + +def jacobian_add(p, q): + if not p[1]: + return q + if not q[1]: + return p + U1 = (p[0] * q[2] ** 2) % P + U2 = (q[0] * p[2] ** 2) % P + S1 = (p[1] * q[2] ** 3) % P + S2 = (q[1] * p[2] ** 3) % P + if U1 == U2: + if S1 != S2: + return (0, 0, 1) + return jacobian_double(p) + H = U2 - U1 + R = S2 - S1 + H2 = (H * H) % P + H3 = (H * H2) % P + U1H2 = (U1 * H2) % P + nx = (R ** 2 - H3 - 2 * U1H2) % P + ny = (R * (U1H2 - nx) - S1 * H3) % P + nz = H * p[2] * q[2] + return (nx, ny, nz) + + +def from_jacobian(p): + z = inv(p[2], P) + return ((p[0] * z**2) % P, (p[1] * z**3) % P) + + +def jacobian_multiply(a, n): + if a[1] == 0 or n == 0: + return (0, 0, 1) + if n == 1: + return a + if n < 0 or n >= N: + return jacobian_multiply(a, 
n % N) + if (n % 2) == 0: + return jacobian_double(jacobian_multiply(a, n//2)) + if (n % 2) == 1: + return jacobian_add(jacobian_double(jacobian_multiply(a, n//2)), a) + + +def fast_multiply(a, n): + return from_jacobian(jacobian_multiply(to_jacobian(a), n)) + + +def fast_add(a, b): + return from_jacobian(jacobian_add(to_jacobian(a), to_jacobian(b))) + +# Functions for handling pubkey and privkey formats + + +def get_pubkey_format(pub): + if is_python2: + two = '\x02' + three = '\x03' + four = '\x04' + else: + two = 2 + three = 3 + four = 4 + + if isinstance(pub, (tuple, list)): return 'decimal' + elif len(pub) == 65 and pub[0] == four: return 'bin' + elif len(pub) == 130 and pub[0:2] == '04': return 'hex' + elif len(pub) == 33 and pub[0] in [two, three]: return 'bin_compressed' + elif len(pub) == 66 and pub[0:2] in ['02', '03']: return 'hex_compressed' + elif len(pub) == 64: return 'bin_electrum' + elif len(pub) == 128: return 'hex_electrum' + else: raise Exception("Pubkey not in recognized format") + + +def encode_pubkey(pub, formt): + if not isinstance(pub, (tuple, list)): + pub = decode_pubkey(pub) + if formt == 'decimal': return pub + elif formt == 'bin': return b'\x04' + encode(pub[0], 256, 32) + encode(pub[1], 256, 32) + elif formt == 'bin_compressed': + return from_int_to_byte(2+(pub[1] % 2)) + encode(pub[0], 256, 32) + elif formt == 'hex': return '04' + encode(pub[0], 16, 64) + encode(pub[1], 16, 64) + elif formt == 'hex_compressed': + return '0'+str(2+(pub[1] % 2)) + encode(pub[0], 16, 64) + elif formt == 'bin_electrum': return encode(pub[0], 256, 32) + encode(pub[1], 256, 32) + elif formt == 'hex_electrum': return encode(pub[0], 16, 64) + encode(pub[1], 16, 64) + else: raise Exception("Invalid format!") + + +def decode_pubkey(pub, formt=None): + if not formt: formt = get_pubkey_format(pub) + if formt == 'decimal': return pub + elif formt == 'bin': return (decode(pub[1:33], 256), decode(pub[33:65], 256)) + elif formt == 'bin_compressed': + x = 
decode(pub[1:33], 256) + beta = pow(int(x*x*x+A*x+B), int((P+1)//4), int(P)) + y = (P-beta) if ((beta + from_byte_to_int(pub[0])) % 2) else beta + return (x, y) + elif formt == 'hex': return (decode(pub[2:66], 16), decode(pub[66:130], 16)) + elif formt == 'hex_compressed': + return decode_pubkey(safe_from_hex(pub), 'bin_compressed') + elif formt == 'bin_electrum': + return (decode(pub[:32], 256), decode(pub[32:64], 256)) + elif formt == 'hex_electrum': + return (decode(pub[:64], 16), decode(pub[64:128], 16)) + else: raise Exception("Invalid format!") + +def get_privkey_format(priv): + if isinstance(priv, int_types): return 'decimal' + elif len(priv) == 32: return 'bin' + elif len(priv) == 33: return 'bin_compressed' + elif len(priv) == 64: return 'hex' + elif len(priv) == 66: return 'hex_compressed' + else: + bin_p = b58check_to_bin(priv) + if len(bin_p) == 32: return 'wif' + elif len(bin_p) == 33: return 'wif_compressed' + else: raise Exception("WIF does not represent privkey") + +def encode_privkey(priv, formt, vbyte=0): + if not isinstance(priv, int_types): + return encode_privkey(decode_privkey(priv), formt, vbyte) + if formt == 'decimal': return priv + elif formt == 'bin': return encode(priv, 256, 32) + elif formt == 'bin_compressed': return encode(priv, 256, 32)+b'\x01' + elif formt == 'hex': return encode(priv, 16, 64) + elif formt == 'hex_compressed': return encode(priv, 16, 64)+'01' + elif formt == 'wif': + return bin_to_b58check(encode(priv, 256, 32), 128+int(vbyte)) + elif formt == 'wif_compressed': + return bin_to_b58check(encode(priv, 256, 32)+b'\x01', 128+int(vbyte)) + else: raise Exception("Invalid format!") + +def decode_privkey(priv,formt=None): + if not formt: formt = get_privkey_format(priv) + if formt == 'decimal': return priv + elif formt == 'bin': return decode(priv, 256) + elif formt == 'bin_compressed': return decode(priv[:32], 256) + elif formt == 'hex': return decode(priv, 16) + elif formt == 'hex_compressed': return decode(priv[:64], 16) 
+ elif formt == 'wif': return decode(b58check_to_bin(priv),256) + elif formt == 'wif_compressed': + return decode(b58check_to_bin(priv)[:32],256) + else: raise Exception("WIF does not represent privkey") + +def add_pubkeys(p1, p2): + f1, f2 = get_pubkey_format(p1), get_pubkey_format(p2) + return encode_pubkey(fast_add(decode_pubkey(p1, f1), decode_pubkey(p2, f2)), f1) + +def add_privkeys(p1, p2): + f1, f2 = get_privkey_format(p1), get_privkey_format(p2) + return encode_privkey((decode_privkey(p1, f1) + decode_privkey(p2, f2)) % N, f1) + + +def multiply(pubkey, privkey): + f1, f2 = get_pubkey_format(pubkey), get_privkey_format(privkey) + pubkey, privkey = decode_pubkey(pubkey, f1), decode_privkey(privkey, f2) + # http://safecurves.cr.yp.to/twist.html + if not isinf(pubkey) and (pubkey[0]**3+B-pubkey[1]*pubkey[1]) % P != 0: + raise Exception("Point not on curve") + return encode_pubkey(fast_multiply(pubkey, privkey), f1) + + +def divide(pubkey, privkey): + factor = inv(decode_privkey(privkey), N) + return multiply(pubkey, factor) + + +def compress(pubkey): + f = get_pubkey_format(pubkey) + if 'compressed' in f: return pubkey + elif f == 'bin': return encode_pubkey(decode_pubkey(pubkey, f), 'bin_compressed') + elif f == 'hex' or f == 'decimal': + return encode_pubkey(decode_pubkey(pubkey, f), 'hex_compressed') + + +def decompress(pubkey): + f = get_pubkey_format(pubkey) + if 'compressed' not in f: return pubkey + elif f == 'bin_compressed': return encode_pubkey(decode_pubkey(pubkey, f), 'bin') + elif f == 'hex_compressed' or f == 'decimal': + return encode_pubkey(decode_pubkey(pubkey, f), 'hex') + + +def privkey_to_pubkey(privkey): + f = get_privkey_format(privkey) + privkey = decode_privkey(privkey, f) + if privkey >= N: + raise Exception("Invalid privkey") + if f in ['bin', 'bin_compressed', 'hex', 'hex_compressed', 'decimal']: + return encode_pubkey(fast_multiply(G, privkey), f) + else: + return encode_pubkey(fast_multiply(G, privkey), f.replace('wif', 'hex')) + 
+privtopub = privkey_to_pubkey + + +def privkey_to_address(priv, magicbyte=0): + return pubkey_to_address(privkey_to_pubkey(priv), magicbyte) +privtoaddr = privkey_to_address + + +def neg_pubkey(pubkey): + f = get_pubkey_format(pubkey) + pubkey = decode_pubkey(pubkey, f) + return encode_pubkey((pubkey[0], (P-pubkey[1]) % P), f) + + +def neg_privkey(privkey): + f = get_privkey_format(privkey) + privkey = decode_privkey(privkey, f) + return encode_privkey((N - privkey) % N, f) + +def subtract_pubkeys(p1, p2): + f1, f2 = get_pubkey_format(p1), get_pubkey_format(p2) + k2 = decode_pubkey(p2, f2) + return encode_pubkey(fast_add(decode_pubkey(p1, f1), (k2[0], (P - k2[1]) % P)), f1) + + +def subtract_privkeys(p1, p2): + f1, f2 = get_privkey_format(p1), get_privkey_format(p2) + k2 = decode_privkey(p2, f2) + return encode_privkey((decode_privkey(p1, f1) - k2) % N, f1) + +# Hashes + + +def bin_hash160(string): + intermed = hashlib.sha256(string).digest() + digest = '' + try: + digest = hashlib.new('ripemd160', intermed).digest() + except: + digest = RIPEMD160(intermed).digest() + return digest + + +def hash160(string): + return safe_hexlify(bin_hash160(string)) + + +def bin_sha256(string): + binary_data = string if isinstance(string, bytes) else bytes(string, 'utf-8') + return hashlib.sha256(binary_data).digest() + +def sha256(string): + return bytes_to_hex_string(bin_sha256(string)) + + +def bin_ripemd160(string): + try: + digest = hashlib.new('ripemd160', string).digest() + except: + digest = RIPEMD160(string).digest() + return digest + + +def ripemd160(string): + return safe_hexlify(bin_ripemd160(string)) + + +def bin_dbl_sha256(s): + bytes_to_hash = from_string_to_bytes(s) + return hashlib.sha256(hashlib.sha256(bytes_to_hash).digest()).digest() + + +def dbl_sha256(string): + return safe_hexlify(bin_dbl_sha256(string)) + + +def bin_slowsha(string): + string = from_string_to_bytes(string) + orig_input = string + for i in range(100000): + string = hashlib.sha256(string + 
orig_input).digest() + return string + + +def slowsha(string): + return safe_hexlify(bin_slowsha(string)) + + +def hash_to_int(x): + if len(x) in [40, 64]: + return decode(x, 16) + return decode(x, 256) + + +def num_to_var_int(x): + x = int(x) + if x < 253: return from_int_to_byte(x) + elif x < 65536: return from_int_to_byte(253)+encode(x, 256, 2)[::-1] + elif x < 4294967296: return from_int_to_byte(254) + encode(x, 256, 4)[::-1] + else: return from_int_to_byte(255) + encode(x, 256, 8)[::-1] + + +# WTF, Electrum? +def electrum_sig_hash(message): + padded = b"\x18Bitcoin Signed Message:\n" + num_to_var_int(len(message)) + from_string_to_bytes(message) + return bin_dbl_sha256(padded) + + +def random_key(): + # Gotta be secure after that java.SecureRandom fiasco... + entropy = random_string(32) \ + + str(random.randrange(2**256)) \ + + str(int(time.time() * 1000000)) + return sha256(entropy) + + +def random_electrum_seed(): + entropy = os.urandom(32) \ + + str(random.randrange(2**256)) \ + + str(int(time.time() * 1000000)) + return sha256(entropy)[:32] + +# Encodings + +def b58check_to_bin(inp): + leadingzbytes = len(re.match('^1*', inp).group(0)) + data = b'\x00' * leadingzbytes + changebase(inp, 58, 256) + assert bin_dbl_sha256(data[:-4])[:4] == data[-4:] + return data[1:-4] + + +def get_version_byte(inp): + leadingzbytes = len(re.match('^1*', inp).group(0)) + data = b'\x00' * leadingzbytes + changebase(inp, 58, 256) + assert bin_dbl_sha256(data[:-4])[:4] == data[-4:] + return ord(data[0]) + + +def hex_to_b58check(inp, magicbyte=0): + return bin_to_b58check(binascii.unhexlify(inp), magicbyte) + + +def b58check_to_hex(inp): + return safe_hexlify(b58check_to_bin(inp)) + + +def pubkey_to_address(pubkey, magicbyte=0): + if isinstance(pubkey, (list, tuple)): + pubkey = encode_pubkey(pubkey, 'bin') + if len(pubkey) in [66, 130]: + return bin_to_b58check( + bin_hash160(binascii.unhexlify(pubkey)), magicbyte) + return bin_to_b58check(bin_hash160(pubkey), magicbyte) + 
+pubtoaddr = pubkey_to_address + +# EDCSA + + +def encode_sig(v, r, s): + vb, rb, sb = from_int_to_byte(v), encode(r, 256), encode(s, 256) + + result = base64.b64encode(vb+b'\x00'*(32-len(rb))+rb+b'\x00'*(32-len(sb))+sb) + return result if is_python2 else str(result, 'utf-8') + + +def decode_sig(sig): + bytez = base64.b64decode(sig) + return from_byte_to_int(bytez[0]), decode(bytez[1:33], 256), decode(bytez[33:], 256) + +# https://tools.ietf.org/html/rfc6979#section-3.2 + + +def deterministic_generate_k(msghash, priv): + v = b'\x01' * 32 + k = b'\x00' * 32 + priv = encode_privkey(priv, 'bin') + msghash = encode(hash_to_int(msghash), 256, 32) + k = hmac.new(k, v+b'\x00'+priv+msghash, hashlib.sha256).digest() + v = hmac.new(k, v, hashlib.sha256).digest() + k = hmac.new(k, v+b'\x01'+priv+msghash, hashlib.sha256).digest() + v = hmac.new(k, v, hashlib.sha256).digest() + return decode(hmac.new(k, v, hashlib.sha256).digest(), 256) + + +def ecdsa_raw_sign(msghash, priv): + + z = hash_to_int(msghash) + k = deterministic_generate_k(msghash, priv) + + r, y = fast_multiply(G, k) + s = inv(k, N) * (z + r*decode_privkey(priv)) % N + + return 27+(y % 2), r, s + + +def ecdsa_sign(msg, priv): + return encode_sig(*ecdsa_raw_sign(electrum_sig_hash(msg), priv)) + + +def ecdsa_raw_verify(msghash, vrs, pub): + v, r, s = vrs + + w = inv(s, N) + z = hash_to_int(msghash) + + u1, u2 = z*w % N, r*w % N + x, y = fast_add(fast_multiply(G, u1), fast_multiply(decode_pubkey(pub), u2)) + + return r == x + + +def ecdsa_verify(msg, sig, pub): + return ecdsa_raw_verify(electrum_sig_hash(msg), decode_sig(sig), pub) + + +def ecdsa_raw_recover(msghash, vrs): + v, r, s = vrs + + x = r + beta = pow(x*x*x+A*x+B, (P+1)//4, P) + y = beta if v % 2 ^ beta % 2 else (P - beta) + z = hash_to_int(msghash) + Gz = jacobian_multiply((Gx, Gy, 1), (N - z) % N) + XY = jacobian_multiply((x, y, 1), s) + Qr = jacobian_add(Gz, XY) + Q = jacobian_multiply(Qr, inv(r, N)) + Q = from_jacobian(Q) + + if ecdsa_raw_verify(msghash, 
vrs, Q): + return Q + return False + + +def ecdsa_recover(msg, sig): + return encode_pubkey(ecdsa_raw_recover(electrum_sig_hash(msg), decode_sig(sig)), 'hex') diff --git a/src/lib/pybitcointools/bitcoin/py2specials.py b/src/lib/pybitcointools/bitcoin/py2specials.py new file mode 100644 index 00000000..4e2e42bb --- /dev/null +++ b/src/lib/pybitcointools/bitcoin/py2specials.py @@ -0,0 +1,94 @@ +import sys, re +import binascii +import os +import hashlib + + +if sys.version_info.major == 2: + string_types = (str, unicode) + string_or_bytes_types = string_types + int_types = (int, float, long) + + # Base switching + code_strings = { + 2: '01', + 10: '0123456789', + 16: '0123456789abcdef', + 32: 'abcdefghijklmnopqrstuvwxyz234567', + 58: '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz', + 256: ''.join([chr(x) for x in range(256)]) + } + + def bin_dbl_sha256(s): + bytes_to_hash = from_string_to_bytes(s) + return hashlib.sha256(hashlib.sha256(bytes_to_hash).digest()).digest() + + def lpad(msg, symbol, length): + if len(msg) >= length: + return msg + return symbol * (length - len(msg)) + msg + + def get_code_string(base): + if base in code_strings: + return code_strings[base] + else: + raise ValueError("Invalid base!") + + def changebase(string, frm, to, minlen=0): + if frm == to: + return lpad(string, get_code_string(frm)[0], minlen) + return encode(decode(string, frm), to, minlen) + + def bin_to_b58check(inp, magicbyte=0): + inp_fmtd = chr(int(magicbyte)) + inp + leadingzbytes = len(re.match('^\x00*', inp_fmtd).group(0)) + checksum = bin_dbl_sha256(inp_fmtd)[:4] + return '1' * leadingzbytes + changebase(inp_fmtd+checksum, 256, 58) + + def bytes_to_hex_string(b): + return b.encode('hex') + + def safe_from_hex(s): + return s.decode('hex') + + def from_int_representation_to_bytes(a): + return str(a) + + def from_int_to_byte(a): + return chr(a) + + def from_byte_to_int(a): + return ord(a) + + def from_bytes_to_string(s): + return s + + def from_string_to_bytes(a): 
+ return a + + def safe_hexlify(a): + return binascii.hexlify(a) + + def encode(val, base, minlen=0): + base, minlen = int(base), int(minlen) + code_string = get_code_string(base) + result = "" + while val > 0: + result = code_string[val % base] + result + val //= base + return code_string[0] * max(minlen - len(result), 0) + result + + def decode(string, base): + base = int(base) + code_string = get_code_string(base) + result = 0 + if base == 16: + string = string.lower() + while len(string) > 0: + result *= base + result += code_string.find(string[0]) + string = string[1:] + return result + + def random_string(x): + return os.urandom(x) diff --git a/src/lib/pybitcointools/bitcoin/py3specials.py b/src/lib/pybitcointools/bitcoin/py3specials.py new file mode 100644 index 00000000..be234722 --- /dev/null +++ b/src/lib/pybitcointools/bitcoin/py3specials.py @@ -0,0 +1,119 @@ +import sys, os +import binascii +import hashlib + + +if sys.version_info.major == 3: + string_types = (str) + string_or_bytes_types = (str, bytes) + int_types = (int, float) + # Base switching + code_strings = { + 2: '01', + 10: '0123456789', + 16: '0123456789abcdef', + 32: 'abcdefghijklmnopqrstuvwxyz234567', + 58: '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz', + 256: ''.join([chr(x) for x in range(256)]) + } + + def bin_dbl_sha256(s): + bytes_to_hash = from_string_to_bytes(s) + return hashlib.sha256(hashlib.sha256(bytes_to_hash).digest()).digest() + + def lpad(msg, symbol, length): + if len(msg) >= length: + return msg + return symbol * (length - len(msg)) + msg + + def get_code_string(base): + if base in code_strings: + return code_strings[base] + else: + raise ValueError("Invalid base!") + + def changebase(string, frm, to, minlen=0): + if frm == to: + return lpad(string, get_code_string(frm)[0], minlen) + return encode(decode(string, frm), to, minlen) + + def bin_to_b58check(inp, magicbyte=0): + inp_fmtd = from_int_to_byte(int(magicbyte))+inp + + leadingzbytes = 0 + for x in 
inp_fmtd: + if x != 0: + break + leadingzbytes += 1 + + checksum = bin_dbl_sha256(inp_fmtd)[:4] + return '1' * leadingzbytes + changebase(inp_fmtd+checksum, 256, 58) + + def bytes_to_hex_string(b): + if isinstance(b, str): + return b + + return ''.join('{:02x}'.format(y) for y in b) + + def safe_from_hex(s): + return bytes.fromhex(s) + + def from_int_representation_to_bytes(a): + return bytes(str(a), 'utf-8') + + def from_int_to_byte(a): + return bytes([a]) + + def from_byte_to_int(a): + return a + + def from_string_to_bytes(a): + return a if isinstance(a, bytes) else bytes(a, 'utf-8') + + def safe_hexlify(a): + return str(binascii.hexlify(a), 'utf-8') + + def encode(val, base, minlen=0): + base, minlen = int(base), int(minlen) + code_string = get_code_string(base) + result_bytes = bytes() + while val > 0: + curcode = code_string[val % base] + result_bytes = bytes([ord(curcode)]) + result_bytes + val //= base + + pad_size = minlen - len(result_bytes) + + padding_element = b'\x00' if base == 256 else b'1' \ + if base == 58 else b'0' + if (pad_size > 0): + result_bytes = padding_element*pad_size + result_bytes + + result_string = ''.join([chr(y) for y in result_bytes]) + result = result_bytes if base == 256 else result_string + + return result + + def decode(string, base): + if base == 256 and isinstance(string, str): + string = bytes(bytearray.fromhex(string)) + base = int(base) + code_string = get_code_string(base) + result = 0 + if base == 256: + def extract(d, cs): + return d + else: + def extract(d, cs): + return cs.find(d if isinstance(d, str) else chr(d)) + + if base == 16: + string = string.lower() + while len(string) > 0: + result *= base + result += extract(string[0], code_string) + string = string[1:] + return result + + def random_string(x): + return str(os.urandom(x)) diff --git a/src/lib/pybitcointools/bitcoin/ripemd.py b/src/lib/pybitcointools/bitcoin/ripemd.py new file mode 100644 index 00000000..4b0c6045 --- /dev/null +++ 
b/src/lib/pybitcointools/bitcoin/ripemd.py @@ -0,0 +1,414 @@ +## ripemd.py - pure Python implementation of the RIPEMD-160 algorithm. +## Bjorn Edstrom 16 december 2007. +## +## Copyrights +## ========== +## +## This code is a derived from an implementation by Markus Friedl which is +## subject to the following license. This Python implementation is not +## subject to any other license. +## +##/* +## * Copyright (c) 2001 Markus Friedl. All rights reserved. +## * +## * Redistribution and use in source and binary forms, with or without +## * modification, are permitted provided that the following conditions +## * are met: +## * 1. Redistributions of source code must retain the above copyright +## * notice, this list of conditions and the following disclaimer. +## * 2. Redistributions in binary form must reproduce the above copyright +## * notice, this list of conditions and the following disclaimer in the +## * documentation and/or other materials provided with the distribution. +## * +## * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +## * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +## * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. +## * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, +## * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT +## * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +## * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +## * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +## * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +## * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+## */ +##/* +## * Preneel, Bosselaers, Dobbertin, "The Cryptographic Hash Function RIPEMD-160", +## * RSA Laboratories, CryptoBytes, Volume 3, Number 2, Autumn 1997, +## * ftp://ftp.rsasecurity.com/pub/cryptobytes/crypto3n2.pdf +## */ + +try: + import psyco + psyco.full() +except ImportError: + pass + +import sys + +is_python2 = sys.version_info.major == 2 +#block_size = 1 +digest_size = 20 +digestsize = 20 + +try: + range = xrange +except: + pass + +class RIPEMD160: + """Return a new RIPEMD160 object. An optional string argument + may be provided; if present, this string will be automatically + hashed.""" + + def __init__(self, arg=None): + self.ctx = RMDContext() + if arg: + self.update(arg) + self.dig = None + + def update(self, arg): + """update(arg)""" + RMD160Update(self.ctx, arg, len(arg)) + self.dig = None + + def digest(self): + """digest()""" + if self.dig: + return self.dig + ctx = self.ctx.copy() + self.dig = RMD160Final(self.ctx) + self.ctx = ctx + return self.dig + + def hexdigest(self): + """hexdigest()""" + dig = self.digest() + hex_digest = '' + for d in dig: + if (is_python2): + hex_digest += '%02x' % ord(d) + else: + hex_digest += '%02x' % d + return hex_digest + + def copy(self): + """copy()""" + import copy + return copy.deepcopy(self) + + + +def new(arg=None): + """Return a new RIPEMD160 object. An optional string argument + may be provided; if present, this string will be automatically + hashed.""" + return RIPEMD160(arg) + + + +# +# Private. 
+# + +class RMDContext: + def __init__(self): + self.state = [0x67452301, 0xEFCDAB89, 0x98BADCFE, + 0x10325476, 0xC3D2E1F0] # uint32 + self.count = 0 # uint64 + self.buffer = [0]*64 # uchar + def copy(self): + ctx = RMDContext() + ctx.state = self.state[:] + ctx.count = self.count + ctx.buffer = self.buffer[:] + return ctx + +K0 = 0x00000000 +K1 = 0x5A827999 +K2 = 0x6ED9EBA1 +K3 = 0x8F1BBCDC +K4 = 0xA953FD4E + +KK0 = 0x50A28BE6 +KK1 = 0x5C4DD124 +KK2 = 0x6D703EF3 +KK3 = 0x7A6D76E9 +KK4 = 0x00000000 + +def ROL(n, x): + return ((x << n) & 0xffffffff) | (x >> (32 - n)) + +def F0(x, y, z): + return x ^ y ^ z + +def F1(x, y, z): + return (x & y) | (((~x) % 0x100000000) & z) + +def F2(x, y, z): + return (x | ((~y) % 0x100000000)) ^ z + +def F3(x, y, z): + return (x & z) | (((~z) % 0x100000000) & y) + +def F4(x, y, z): + return x ^ (y | ((~z) % 0x100000000)) + +def R(a, b, c, d, e, Fj, Kj, sj, rj, X): + a = ROL(sj, (a + Fj(b, c, d) + X[rj] + Kj) % 0x100000000) + e + c = ROL(10, c) + return a % 0x100000000, c + +PADDING = [0x80] + [0]*63 + +import sys +import struct + +def RMD160Transform(state, block): #uint32 state[5], uchar block[64] + x = [0]*16 + if sys.byteorder == 'little': + if is_python2: + x = struct.unpack('<16L', ''.join([chr(x) for x in block[0:64]])) + else: + x = struct.unpack('<16L', bytes(block[0:64])) + else: + raise "Error!!" 
+ a = state[0] + b = state[1] + c = state[2] + d = state[3] + e = state[4] + + #/* Round 1 */ + a, c = R(a, b, c, d, e, F0, K0, 11, 0, x); + e, b = R(e, a, b, c, d, F0, K0, 14, 1, x); + d, a = R(d, e, a, b, c, F0, K0, 15, 2, x); + c, e = R(c, d, e, a, b, F0, K0, 12, 3, x); + b, d = R(b, c, d, e, a, F0, K0, 5, 4, x); + a, c = R(a, b, c, d, e, F0, K0, 8, 5, x); + e, b = R(e, a, b, c, d, F0, K0, 7, 6, x); + d, a = R(d, e, a, b, c, F0, K0, 9, 7, x); + c, e = R(c, d, e, a, b, F0, K0, 11, 8, x); + b, d = R(b, c, d, e, a, F0, K0, 13, 9, x); + a, c = R(a, b, c, d, e, F0, K0, 14, 10, x); + e, b = R(e, a, b, c, d, F0, K0, 15, 11, x); + d, a = R(d, e, a, b, c, F0, K0, 6, 12, x); + c, e = R(c, d, e, a, b, F0, K0, 7, 13, x); + b, d = R(b, c, d, e, a, F0, K0, 9, 14, x); + a, c = R(a, b, c, d, e, F0, K0, 8, 15, x); #/* #15 */ + #/* Round 2 */ + e, b = R(e, a, b, c, d, F1, K1, 7, 7, x); + d, a = R(d, e, a, b, c, F1, K1, 6, 4, x); + c, e = R(c, d, e, a, b, F1, K1, 8, 13, x); + b, d = R(b, c, d, e, a, F1, K1, 13, 1, x); + a, c = R(a, b, c, d, e, F1, K1, 11, 10, x); + e, b = R(e, a, b, c, d, F1, K1, 9, 6, x); + d, a = R(d, e, a, b, c, F1, K1, 7, 15, x); + c, e = R(c, d, e, a, b, F1, K1, 15, 3, x); + b, d = R(b, c, d, e, a, F1, K1, 7, 12, x); + a, c = R(a, b, c, d, e, F1, K1, 12, 0, x); + e, b = R(e, a, b, c, d, F1, K1, 15, 9, x); + d, a = R(d, e, a, b, c, F1, K1, 9, 5, x); + c, e = R(c, d, e, a, b, F1, K1, 11, 2, x); + b, d = R(b, c, d, e, a, F1, K1, 7, 14, x); + a, c = R(a, b, c, d, e, F1, K1, 13, 11, x); + e, b = R(e, a, b, c, d, F1, K1, 12, 8, x); #/* #31 */ + #/* Round 3 */ + d, a = R(d, e, a, b, c, F2, K2, 11, 3, x); + c, e = R(c, d, e, a, b, F2, K2, 13, 10, x); + b, d = R(b, c, d, e, a, F2, K2, 6, 14, x); + a, c = R(a, b, c, d, e, F2, K2, 7, 4, x); + e, b = R(e, a, b, c, d, F2, K2, 14, 9, x); + d, a = R(d, e, a, b, c, F2, K2, 9, 15, x); + c, e = R(c, d, e, a, b, F2, K2, 13, 8, x); + b, d = R(b, c, d, e, a, F2, K2, 15, 1, x); + a, c = R(a, b, c, d, e, F2, K2, 14, 2, x); + e, b = 
R(e, a, b, c, d, F2, K2, 8, 7, x); + d, a = R(d, e, a, b, c, F2, K2, 13, 0, x); + c, e = R(c, d, e, a, b, F2, K2, 6, 6, x); + b, d = R(b, c, d, e, a, F2, K2, 5, 13, x); + a, c = R(a, b, c, d, e, F2, K2, 12, 11, x); + e, b = R(e, a, b, c, d, F2, K2, 7, 5, x); + d, a = R(d, e, a, b, c, F2, K2, 5, 12, x); #/* #47 */ + #/* Round 4 */ + c, e = R(c, d, e, a, b, F3, K3, 11, 1, x); + b, d = R(b, c, d, e, a, F3, K3, 12, 9, x); + a, c = R(a, b, c, d, e, F3, K3, 14, 11, x); + e, b = R(e, a, b, c, d, F3, K3, 15, 10, x); + d, a = R(d, e, a, b, c, F3, K3, 14, 0, x); + c, e = R(c, d, e, a, b, F3, K3, 15, 8, x); + b, d = R(b, c, d, e, a, F3, K3, 9, 12, x); + a, c = R(a, b, c, d, e, F3, K3, 8, 4, x); + e, b = R(e, a, b, c, d, F3, K3, 9, 13, x); + d, a = R(d, e, a, b, c, F3, K3, 14, 3, x); + c, e = R(c, d, e, a, b, F3, K3, 5, 7, x); + b, d = R(b, c, d, e, a, F3, K3, 6, 15, x); + a, c = R(a, b, c, d, e, F3, K3, 8, 14, x); + e, b = R(e, a, b, c, d, F3, K3, 6, 5, x); + d, a = R(d, e, a, b, c, F3, K3, 5, 6, x); + c, e = R(c, d, e, a, b, F3, K3, 12, 2, x); #/* #63 */ + #/* Round 5 */ + b, d = R(b, c, d, e, a, F4, K4, 9, 4, x); + a, c = R(a, b, c, d, e, F4, K4, 15, 0, x); + e, b = R(e, a, b, c, d, F4, K4, 5, 5, x); + d, a = R(d, e, a, b, c, F4, K4, 11, 9, x); + c, e = R(c, d, e, a, b, F4, K4, 6, 7, x); + b, d = R(b, c, d, e, a, F4, K4, 8, 12, x); + a, c = R(a, b, c, d, e, F4, K4, 13, 2, x); + e, b = R(e, a, b, c, d, F4, K4, 12, 10, x); + d, a = R(d, e, a, b, c, F4, K4, 5, 14, x); + c, e = R(c, d, e, a, b, F4, K4, 12, 1, x); + b, d = R(b, c, d, e, a, F4, K4, 13, 3, x); + a, c = R(a, b, c, d, e, F4, K4, 14, 8, x); + e, b = R(e, a, b, c, d, F4, K4, 11, 11, x); + d, a = R(d, e, a, b, c, F4, K4, 8, 6, x); + c, e = R(c, d, e, a, b, F4, K4, 5, 15, x); + b, d = R(b, c, d, e, a, F4, K4, 6, 13, x); #/* #79 */ + + aa = a; + bb = b; + cc = c; + dd = d; + ee = e; + + a = state[0] + b = state[1] + c = state[2] + d = state[3] + e = state[4] + + #/* Parallel round 1 */ + a, c = R(a, b, c, d, e, F4, KK0, 
8, 5, x) + e, b = R(e, a, b, c, d, F4, KK0, 9, 14, x) + d, a = R(d, e, a, b, c, F4, KK0, 9, 7, x) + c, e = R(c, d, e, a, b, F4, KK0, 11, 0, x) + b, d = R(b, c, d, e, a, F4, KK0, 13, 9, x) + a, c = R(a, b, c, d, e, F4, KK0, 15, 2, x) + e, b = R(e, a, b, c, d, F4, KK0, 15, 11, x) + d, a = R(d, e, a, b, c, F4, KK0, 5, 4, x) + c, e = R(c, d, e, a, b, F4, KK0, 7, 13, x) + b, d = R(b, c, d, e, a, F4, KK0, 7, 6, x) + a, c = R(a, b, c, d, e, F4, KK0, 8, 15, x) + e, b = R(e, a, b, c, d, F4, KK0, 11, 8, x) + d, a = R(d, e, a, b, c, F4, KK0, 14, 1, x) + c, e = R(c, d, e, a, b, F4, KK0, 14, 10, x) + b, d = R(b, c, d, e, a, F4, KK0, 12, 3, x) + a, c = R(a, b, c, d, e, F4, KK0, 6, 12, x) #/* #15 */ + #/* Parallel round 2 */ + e, b = R(e, a, b, c, d, F3, KK1, 9, 6, x) + d, a = R(d, e, a, b, c, F3, KK1, 13, 11, x) + c, e = R(c, d, e, a, b, F3, KK1, 15, 3, x) + b, d = R(b, c, d, e, a, F3, KK1, 7, 7, x) + a, c = R(a, b, c, d, e, F3, KK1, 12, 0, x) + e, b = R(e, a, b, c, d, F3, KK1, 8, 13, x) + d, a = R(d, e, a, b, c, F3, KK1, 9, 5, x) + c, e = R(c, d, e, a, b, F3, KK1, 11, 10, x) + b, d = R(b, c, d, e, a, F3, KK1, 7, 14, x) + a, c = R(a, b, c, d, e, F3, KK1, 7, 15, x) + e, b = R(e, a, b, c, d, F3, KK1, 12, 8, x) + d, a = R(d, e, a, b, c, F3, KK1, 7, 12, x) + c, e = R(c, d, e, a, b, F3, KK1, 6, 4, x) + b, d = R(b, c, d, e, a, F3, KK1, 15, 9, x) + a, c = R(a, b, c, d, e, F3, KK1, 13, 1, x) + e, b = R(e, a, b, c, d, F3, KK1, 11, 2, x) #/* #31 */ + #/* Parallel round 3 */ + d, a = R(d, e, a, b, c, F2, KK2, 9, 15, x) + c, e = R(c, d, e, a, b, F2, KK2, 7, 5, x) + b, d = R(b, c, d, e, a, F2, KK2, 15, 1, x) + a, c = R(a, b, c, d, e, F2, KK2, 11, 3, x) + e, b = R(e, a, b, c, d, F2, KK2, 8, 7, x) + d, a = R(d, e, a, b, c, F2, KK2, 6, 14, x) + c, e = R(c, d, e, a, b, F2, KK2, 6, 6, x) + b, d = R(b, c, d, e, a, F2, KK2, 14, 9, x) + a, c = R(a, b, c, d, e, F2, KK2, 12, 11, x) + e, b = R(e, a, b, c, d, F2, KK2, 13, 8, x) + d, a = R(d, e, a, b, c, F2, KK2, 5, 12, x) + c, e = R(c, d, e, a, b, F2, 
KK2, 14, 2, x) + b, d = R(b, c, d, e, a, F2, KK2, 13, 10, x) + a, c = R(a, b, c, d, e, F2, KK2, 13, 0, x) + e, b = R(e, a, b, c, d, F2, KK2, 7, 4, x) + d, a = R(d, e, a, b, c, F2, KK2, 5, 13, x) #/* #47 */ + #/* Parallel round 4 */ + c, e = R(c, d, e, a, b, F1, KK3, 15, 8, x) + b, d = R(b, c, d, e, a, F1, KK3, 5, 6, x) + a, c = R(a, b, c, d, e, F1, KK3, 8, 4, x) + e, b = R(e, a, b, c, d, F1, KK3, 11, 1, x) + d, a = R(d, e, a, b, c, F1, KK3, 14, 3, x) + c, e = R(c, d, e, a, b, F1, KK3, 14, 11, x) + b, d = R(b, c, d, e, a, F1, KK3, 6, 15, x) + a, c = R(a, b, c, d, e, F1, KK3, 14, 0, x) + e, b = R(e, a, b, c, d, F1, KK3, 6, 5, x) + d, a = R(d, e, a, b, c, F1, KK3, 9, 12, x) + c, e = R(c, d, e, a, b, F1, KK3, 12, 2, x) + b, d = R(b, c, d, e, a, F1, KK3, 9, 13, x) + a, c = R(a, b, c, d, e, F1, KK3, 12, 9, x) + e, b = R(e, a, b, c, d, F1, KK3, 5, 7, x) + d, a = R(d, e, a, b, c, F1, KK3, 15, 10, x) + c, e = R(c, d, e, a, b, F1, KK3, 8, 14, x) #/* #63 */ + #/* Parallel round 5 */ + b, d = R(b, c, d, e, a, F0, KK4, 8, 12, x) + a, c = R(a, b, c, d, e, F0, KK4, 5, 15, x) + e, b = R(e, a, b, c, d, F0, KK4, 12, 10, x) + d, a = R(d, e, a, b, c, F0, KK4, 9, 4, x) + c, e = R(c, d, e, a, b, F0, KK4, 12, 1, x) + b, d = R(b, c, d, e, a, F0, KK4, 5, 5, x) + a, c = R(a, b, c, d, e, F0, KK4, 14, 8, x) + e, b = R(e, a, b, c, d, F0, KK4, 6, 7, x) + d, a = R(d, e, a, b, c, F0, KK4, 8, 6, x) + c, e = R(c, d, e, a, b, F0, KK4, 13, 2, x) + b, d = R(b, c, d, e, a, F0, KK4, 6, 13, x) + a, c = R(a, b, c, d, e, F0, KK4, 5, 14, x) + e, b = R(e, a, b, c, d, F0, KK4, 15, 0, x) + d, a = R(d, e, a, b, c, F0, KK4, 13, 3, x) + c, e = R(c, d, e, a, b, F0, KK4, 11, 9, x) + b, d = R(b, c, d, e, a, F0, KK4, 11, 11, x) #/* #79 */ + + t = (state[1] + cc + d) % 0x100000000; + state[1] = (state[2] + dd + e) % 0x100000000; + state[2] = (state[3] + ee + a) % 0x100000000; + state[3] = (state[4] + aa + b) % 0x100000000; + state[4] = (state[0] + bb + c) % 0x100000000; + state[0] = t % 0x100000000; + + pass + + +def 
RMD160Update(ctx, inp, inplen): + if type(inp) == str: + inp = [ord(i)&0xff for i in inp] + + have = int((ctx.count // 8) % 64) + inplen = int(inplen) + need = 64 - have + ctx.count += 8 * inplen + off = 0 + if inplen >= need: + if have: + for i in range(need): + ctx.buffer[have+i] = inp[i] + RMD160Transform(ctx.state, ctx.buffer) + off = need + have = 0 + while off + 64 <= inplen: + RMD160Transform(ctx.state, inp[off:]) #<--- + off += 64 + if off < inplen: + # memcpy(ctx->buffer + have, input+off, len-off); + for i in range(inplen - off): + ctx.buffer[have+i] = inp[off+i] + +def RMD160Final(ctx): + size = struct.pack("= 2**255: + b1 = '00' + b1 + if s >= 2**255: + b2 = '00' + b2 + left = '02'+encode(len(b1)//2, 16, 2)+b1 + right = '02'+encode(len(b2)//2, 16, 2)+b2 + return '30'+encode(len(left+right)//2, 16, 2)+left+right + + +def der_decode_sig(sig): + leftlen = decode(sig[6:8], 16)*2 + left = sig[8:8+leftlen] + rightlen = decode(sig[10+leftlen:12+leftlen], 16)*2 + right = sig[12+leftlen:12+leftlen+rightlen] + return (None, decode(left, 16), decode(right, 16)) + + +def txhash(tx, hashcode=None): + if isinstance(tx, str) and re.match('^[0-9a-fA-F]*$', tx): + tx = changebase(tx, 16, 256) + if hashcode: + return dbl_sha256(from_string_to_bytes(tx) + encode(int(hashcode), 256, 4)[::-1]) + else: + return safe_hexlify(bin_dbl_sha256(tx)[::-1]) + + +def bin_txhash(tx, hashcode=None): + return binascii.unhexlify(txhash(tx, hashcode)) + + +def ecdsa_tx_sign(tx, priv, hashcode=SIGHASH_ALL): + rawsig = ecdsa_raw_sign(bin_txhash(tx, hashcode), priv) + return der_encode_sig(*rawsig)+encode(hashcode, 16, 2) + + +def ecdsa_tx_verify(tx, sig, pub, hashcode=SIGHASH_ALL): + return ecdsa_raw_verify(bin_txhash(tx, hashcode), der_decode_sig(sig), pub) + + +def ecdsa_tx_recover(tx, sig, hashcode=SIGHASH_ALL): + z = bin_txhash(tx, hashcode) + _, r, s = der_decode_sig(sig) + left = ecdsa_raw_recover(z, (0, r, s)) + right = ecdsa_raw_recover(z, (1, r, s)) + return (encode_pubkey(left, 
'hex'), encode_pubkey(right, 'hex')) + +# Scripts + + +def mk_pubkey_script(addr): + # Keep the auxiliary functions around for altcoins' sake + return '76a914' + b58check_to_hex(addr) + '88ac' + + +def mk_scripthash_script(addr): + return 'a914' + b58check_to_hex(addr) + '87' + +# Address representation to output script + + +def address_to_script(addr): + if addr[0] == '3' or addr[0] == '2': + return mk_scripthash_script(addr) + else: + return mk_pubkey_script(addr) + +# Output script to address representation + + +def script_to_address(script, vbyte=0): + if re.match('^[0-9a-fA-F]*$', script): + script = binascii.unhexlify(script) + if script[:3] == b'\x76\xa9\x14' and script[-2:] == b'\x88\xac' and len(script) == 25: + return bin_to_b58check(script[3:-2], vbyte) # pubkey hash addresses + else: + if vbyte in [111, 196]: + # Testnet + scripthash_byte = 196 + else: + scripthash_byte = 5 + # BIP0016 scripthash addresses + return bin_to_b58check(script[2:-1], scripthash_byte) + + +def p2sh_scriptaddr(script, magicbyte=5): + if re.match('^[0-9a-fA-F]*$', script): + script = binascii.unhexlify(script) + return hex_to_b58check(hash160(script), magicbyte) +scriptaddr = p2sh_scriptaddr + + +def deserialize_script(script): + if isinstance(script, str) and re.match('^[0-9a-fA-F]*$', script): + return json_changebase(deserialize_script(binascii.unhexlify(script)), + lambda x: safe_hexlify(x)) + out, pos = [], 0 + while pos < len(script): + code = from_byte_to_int(script[pos]) + if code == 0: + out.append(None) + pos += 1 + elif code <= 75: + out.append(script[pos+1:pos+1+code]) + pos += 1 + code + elif code <= 78: + szsz = pow(2, code - 76) + sz = decode(script[pos+szsz: pos:-1], 256) + out.append(script[pos + 1 + szsz:pos + 1 + szsz + sz]) + pos += 1 + szsz + sz + elif code <= 96: + out.append(code - 80) + pos += 1 + else: + out.append(code) + pos += 1 + return out + + +def serialize_script_unit(unit): + if isinstance(unit, int): + if unit < 16: + return 
from_int_to_byte(unit + 80) + else: + return bytes([unit]) + elif unit is None: + return b'\x00' + else: + if len(unit) <= 75: + return from_int_to_byte(len(unit))+unit + elif len(unit) < 256: + return from_int_to_byte(76)+from_int_to_byte(len(unit))+unit + elif len(unit) < 65536: + return from_int_to_byte(77)+encode(len(unit), 256, 2)[::-1]+unit + else: + return from_int_to_byte(78)+encode(len(unit), 256, 4)[::-1]+unit + + +if is_python2: + def serialize_script(script): + if json_is_base(script, 16): + return binascii.hexlify(serialize_script(json_changebase(script, + lambda x: binascii.unhexlify(x)))) + return ''.join(map(serialize_script_unit, script)) +else: + def serialize_script(script): + if json_is_base(script, 16): + return safe_hexlify(serialize_script(json_changebase(script, + lambda x: binascii.unhexlify(x)))) + + result = bytes() + for b in map(serialize_script_unit, script): + result += b if isinstance(b, bytes) else bytes(b, 'utf-8') + return result + + +def mk_multisig_script(*args): # [pubs],k or pub1,pub2...pub[n],k + if isinstance(args[0], list): + pubs, k = args[0], int(args[1]) + else: + pubs = list(filter(lambda x: len(str(x)) >= 32, args)) + k = int(args[len(pubs)]) + return serialize_script([k]+pubs+[len(pubs)]) + 'ae' + +# Signing and verifying + + +def verify_tx_input(tx, i, script, sig, pub): + if re.match('^[0-9a-fA-F]*$', tx): + tx = binascii.unhexlify(tx) + if re.match('^[0-9a-fA-F]*$', script): + script = binascii.unhexlify(script) + if not re.match('^[0-9a-fA-F]*$', sig): + sig = safe_hexlify(sig) + hashcode = decode(sig[-2:], 16) + modtx = signature_form(tx, int(i), script, hashcode) + return ecdsa_tx_verify(modtx, sig, pub, hashcode) + + +def sign(tx, i, priv, hashcode=SIGHASH_ALL): + i = int(i) + if (not is_python2 and isinstance(re, bytes)) or not re.match('^[0-9a-fA-F]*$', tx): + return binascii.unhexlify(sign(safe_hexlify(tx), i, priv)) + if len(priv) <= 33: + priv = safe_hexlify(priv) + pub = privkey_to_pubkey(priv) + address 
= pubkey_to_address(pub) + signing_tx = signature_form(tx, i, mk_pubkey_script(address), hashcode) + sig = ecdsa_tx_sign(signing_tx, priv, hashcode) + txobj = deserialize(tx) + txobj["ins"][i]["script"] = serialize_script([sig, pub]) + return serialize(txobj) + + +def signall(tx, priv): + # if priv is a dictionary, assume format is + # { 'txinhash:txinidx' : privkey } + if isinstance(priv, dict): + for e, i in enumerate(deserialize(tx)["ins"]): + k = priv["%s:%d" % (i["outpoint"]["hash"], i["outpoint"]["index"])] + tx = sign(tx, e, k) + else: + for i in range(len(deserialize(tx)["ins"])): + tx = sign(tx, i, priv) + return tx + + +def multisign(tx, i, script, pk, hashcode=SIGHASH_ALL): + if re.match('^[0-9a-fA-F]*$', tx): + tx = binascii.unhexlify(tx) + if re.match('^[0-9a-fA-F]*$', script): + script = binascii.unhexlify(script) + modtx = signature_form(tx, i, script, hashcode) + return ecdsa_tx_sign(modtx, pk, hashcode) + + +def apply_multisignatures(*args): + # tx,i,script,sigs OR tx,i,script,sig1,sig2...,sig[n] + tx, i, script = args[0], int(args[1]), args[2] + sigs = args[3] if isinstance(args[3], list) else list(args[3:]) + + if isinstance(script, str) and re.match('^[0-9a-fA-F]*$', script): + script = binascii.unhexlify(script) + sigs = [binascii.unhexlify(x) if x[:2] == '30' else x for x in sigs] + if isinstance(tx, str) and re.match('^[0-9a-fA-F]*$', tx): + return safe_hexlify(apply_multisignatures(binascii.unhexlify(tx), i, script, sigs)) + + txobj = deserialize(tx) + txobj["ins"][i]["script"] = serialize_script([None]+sigs+[script]) + return serialize(txobj) + + +def is_inp(arg): + return len(arg) > 64 or "output" in arg or "outpoint" in arg + + +def mktx(*args): + # [in0, in1...],[out0, out1...] or in0, in1 ... out0 out1 ... 
+ ins, outs = [], [] + for arg in args: + if isinstance(arg, list): + for a in arg: (ins if is_inp(a) else outs).append(a) + else: + (ins if is_inp(arg) else outs).append(arg) + + txobj = {"locktime": 0, "version": 1, "ins": [], "outs": []} + for i in ins: + if isinstance(i, dict) and "outpoint" in i: + txobj["ins"].append(i) + else: + if isinstance(i, dict) and "output" in i: + i = i["output"] + txobj["ins"].append({ + "outpoint": {"hash": i[:64], "index": int(i[65:])}, + "script": "", + "sequence": 4294967295 + }) + for o in outs: + if isinstance(o, string_or_bytes_types): + addr = o[:o.find(':')] + val = int(o[o.find(':')+1:]) + o = {} + if re.match('^[0-9a-fA-F]*$', addr): + o["script"] = addr + else: + o["address"] = addr + o["value"] = val + + outobj = {} + if "address" in o: + outobj["script"] = address_to_script(o["address"]) + elif "script" in o: + outobj["script"] = o["script"] + else: + raise Exception("Could not find 'address' or 'script' in output.") + outobj["value"] = o["value"] + txobj["outs"].append(outobj) + + return serialize(txobj) + + +def select(unspent, value): + value = int(value) + high = [u for u in unspent if u["value"] >= value] + high.sort(key=lambda u: u["value"]) + low = [u for u in unspent if u["value"] < value] + low.sort(key=lambda u: -u["value"]) + if len(high): + return [high[0]] + i, tv = 0, 0 + while tv < value and i < len(low): + tv += low[i]["value"] + i += 1 + if tv < value: + raise Exception("Not enough funds") + return low[:i] + +# Only takes inputs of the form { "output": blah, "value": foo } + + +def mksend(*args): + argz, change, fee = args[:-2], args[-2], int(args[-1]) + ins, outs = [], [] + for arg in argz: + if isinstance(arg, list): + for a in arg: + (ins if is_inp(a) else outs).append(a) + else: + (ins if is_inp(arg) else outs).append(arg) + + isum = sum([i["value"] for i in ins]) + osum, outputs2 = 0, [] + for o in outs: + if isinstance(o, string_types): + o2 = { + "address": o[:o.find(':')], + "value": 
int(o[o.find(':')+1:]) + } + else: + o2 = o + outputs2.append(o2) + osum += o2["value"] + + if isum < osum+fee: + raise Exception("Not enough money") + elif isum > osum+fee+5430: + outputs2 += [{"address": change, "value": isum-osum-fee}] + + return mktx(ins, outputs2) diff --git a/src/lib/pybitcointools/pybtctool b/src/lib/pybitcointools/pybtctool new file mode 100644 index 00000000..2f6b3dcc --- /dev/null +++ b/src/lib/pybitcointools/pybtctool @@ -0,0 +1,36 @@ +#!/usr/bin/python +import sys, json, re +from bitcoin import * + +if len(sys.argv) == 1: + print "pybtctool ..." +else: + cmdargs, preargs, kwargs = [], [], {} + i = 2 + # Process first arg tag + if sys.argv[1] == '-s': + preargs.extend(re.findall(r'\S\S*', sys.stdin.read())) + elif sys.argv[1] == '-B': + preargs.extend([sys.stdin.read()]) + elif sys.argv[1] == '-b': + preargs.extend([sys.stdin.read()[:-1]]) + elif sys.argv[1] == '-j': + preargs.extend([json.loads(sys.stdin.read())]) + elif sys.argv[1] == '-J': + preargs.extend(json.loads(sys.stdin.read())) + else: + i = 1 + while i < len(sys.argv): + if sys.argv[i][:2] == '--': + kwargs[sys.argv[i][2:]] = sys.argv[i+1] + i += 2 + else: + cmdargs.append(sys.argv[i]) + i += 1 + cmd = cmdargs[0] + args = preargs + cmdargs[1:] + o = vars()[cmd](*args, **kwargs) + if isinstance(o, (list, dict)): + print json.dumps(o) + else: + print o diff --git a/src/lib/pybitcointools/setup.py b/src/lib/pybitcointools/setup.py new file mode 100644 index 00000000..1cd6d1f8 --- /dev/null +++ b/src/lib/pybitcointools/setup.py @@ -0,0 +1,17 @@ +#!/usr/bin/env python +try: + from setuptools import setup +except ImportError: + from distutils.core import setup + +setup(name='bitcoin', + version='1.1.28', + description='Python Bitcoin Tools', + author='Vitalik Buterin', + author_email='vbuterin@gmail.com', + url='http://github.com/vbuterin/pybitcointools', + packages=['bitcoin'], + scripts=['pybtctool'], + include_package_data=True, + data_files=[("", ["LICENSE"])], + ) diff --git 
a/src/lib/pybitcointools/test.py b/src/lib/pybitcointools/test.py new file mode 100644 index 00000000..2cf415d8 --- /dev/null +++ b/src/lib/pybitcointools/test.py @@ -0,0 +1,534 @@ +import json +import os +import random +import unittest + +import bitcoin.ripemd as ripemd +from bitcoin import * + + +class TestECCArithmetic(unittest.TestCase): + + @classmethod + def setUpClass(cls): + print('Starting ECC arithmetic tests') + + def test_all(self): + for i in range(8): + print('### Round %d' % (i+1)) + x, y = random.randrange(2**256), random.randrange(2**256) + self.assertEqual( + multiply(multiply(G, x), y)[0], + multiply(multiply(G, y), x)[0] + ) + self.assertEqual( + + add_pubkeys(multiply(G, x), multiply(G, y))[0], + multiply(G, add_privkeys(x, y))[0] + ) + + hx, hy = encode(x % N, 16, 64), encode(y % N, 16, 64) + self.assertEqual( + multiply(multiply(G, hx), hy)[0], + multiply(multiply(G, hy), hx)[0] + ) + self.assertEqual( + add_pubkeys(multiply(G, hx), multiply(G, hy))[0], + multiply(G, add_privkeys(hx, hy))[0] + ) + self.assertEqual( + b58check_to_hex(pubtoaddr(privtopub(x))), + b58check_to_hex(pubtoaddr(multiply(G, hx), 23)) + ) + + p = privtopub(sha256(str(x))) + if i % 2 == 1: + p = changebase(p, 16, 256) + self.assertEqual(p, decompress(compress(p))) + self.assertEqual(G[0], multiply(divide(G, x), x)[0]) + + +class TestBases(unittest.TestCase): + + @classmethod + def setUpClass(cls): + print('Starting base change tests') + + def test_all(self): + data = [ + [10, '65535', 16, 'ffff'], + [16, 'deadbeef', 10, '3735928559'], + [10, '0', 16, ''], + [256, b'34567', 10, '219919234615'], + [10, '444', 16, '1bc'], + [256, b'\x03\x04\x05\x06\x07', 10, '12952339975'], + [16, '3132333435', 256, b'12345'] + ] + for prebase, preval, postbase, postval in data: + self.assertEqual(changebase(preval, prebase, postbase), postval) + + for i in range(100): + x = random.randrange(1, 9999999999999999) + frm = random.choice([2, 10, 16, 58, 256]) + to = random.choice([2, 10, 16, 
58, 256]) + self.assertEqual(decode(encode(x, to), to), x) + self.assertEqual(changebase(encode(x, frm), frm, to), encode(x, to)) + self.assertEqual(decode(changebase(encode(x, frm), frm, to), to), x) + + +class TestElectrumWalletInternalConsistency(unittest.TestCase): + + @classmethod + def setUpClass(cls): + print('Starting Electrum wallet internal consistency tests') + + def test_all(self): + for i in range(3): + seed = sha256(str(random.randrange(2**40)))[:32] + mpk = electrum_mpk(seed) + for i in range(5): + pk = electrum_privkey(seed, i) + pub = electrum_pubkey((mpk, seed)[i % 2], i) + pub2 = privtopub(pk) + self.assertEqual( + pub, + pub2, + 'Does not match! Details:\nseed: %s\nmpk: %s\npriv: %s\npub: %s\npub2: %s' % ( + seed, mpk, pk, pub, pub2 + ) + ) + + +class TestElectrumSignVerify(unittest.TestCase): + """Requires Electrum.""" + + @classmethod + def setUpClass(cls): + cls.wallet = "/tmp/tempwallet_" + str(random.randrange(2**40)) + print("Starting wallet tests with: " + cls.wallet) + os.popen('echo "\n\n\n\n\n\n" | electrum -w %s create' % cls.wallet).read() + cls.seed = str(json.loads(os.popen("electrum -w %s getseed" % cls.wallet).read())['seed']) + cls.addies = json.loads(os.popen("electrum -w %s listaddresses" % cls.wallet).read()) + + def test_address(self): + for i in range(5): + self.assertEqual( + self.addies[i], + electrum_address(self.seed, i, 0), + "Address does not match! 
Details:\nseed %s, i: %d" % (self.seed, i) + ) + + def test_sign_verify(self): + print("Electrum-style signing and verification tests, against actual Electrum") + alphabet = "1234567890qwertyuiopasdfghjklzxcvbnm" + for i in range(8): + msg = ''.join([random.choice(alphabet) for i in range(random.randrange(20, 200))]) + addy = random.choice(self.addies) + wif = os.popen('electrum -w %s dumpprivkey %s' % (self.wallet, addy)).readlines()[-2].replace('"', '').strip() + priv = b58check_to_hex(wif) + pub = privtopub(priv) + + sig = os.popen('electrum -w %s signmessage %s %s' % (self.wallet, addy, msg)).readlines()[-1].strip() + self.assertTrue( + ecdsa_verify(msg, sig, pub), + "Verification error. Details:\nmsg: %s\nsig: %s\npriv: %s\naddy: %s\npub: %s" % ( + msg, sig, priv, addy, pub + ) + ) + + rec = ecdsa_recover(msg, sig) + self.assertEqual( + pub, + rec, + "Recovery error. Details:\nmsg: %s\nsig: %s\npriv: %s\naddy: %s\noriginal pub: %s, %s\nrecovered pub: %s" % ( + msg, sig, priv, addy, pub, decode_pubkey(pub, 'hex')[1], rec + ) + ) + + mysig = ecdsa_sign(msg, priv) + self.assertEqual( + os.popen('electrum -w %s verifymessage %s %s %s' % (self.wallet, addy, mysig, msg)).read().strip(), + "true", + "Electrum verify message does not match" + ) + + +class TestTransactionSignVerify(unittest.TestCase): + + @classmethod + def setUpClass(cls): + print("Transaction-style signing and verification tests") + + def test_all(self): + alphabet = "1234567890qwertyuiopasdfghjklzxcvbnm" + for i in range(10): + msg = ''.join([random.choice(alphabet) for i in range(random.randrange(20, 200))]) + priv = sha256(str(random.randrange(2**256))) + pub = privtopub(priv) + sig = ecdsa_tx_sign(msg, priv) + self.assertTrue( + ecdsa_tx_verify(msg, sig, pub), + "Verification error" + ) + + self.assertIn( + pub, + ecdsa_tx_recover(msg, sig), + "Recovery failed" + ) + + +class TestSerialize(unittest.TestCase): + + def test_serialize(self): + tx = 
'0100000001239f932c780e517015842f3b02ff765fba97f9f63f9f1bc718b686a56ed9c73400000000fd5d010047304402200c40fa58d3f6d5537a343cf9c8d13bc7470baf1d13867e0de3e535cd6b4354c802200f2b48f67494835b060d0b2ff85657d2ba2d9ea4e697888c8cb580e8658183a801483045022056f488c59849a4259e7cef70fe5d6d53a4bd1c59a195b0577bd81cb76044beca022100a735b319fa66af7b178fc719b93f905961ef4d4446deca8757a90de2106dd98a014cc95241046c7d87fd72caeab48e937f2feca9e9a4bd77f0eff4ebb2dbbb9855c023e334e188d32aaec4632ea4cbc575c037d8101aec73d029236e7b1c2380f3e4ad7edced41046fd41cddf3bbda33a240b417a825cc46555949917c7ccf64c59f42fd8dfe95f34fae3b09ed279c8c5b3530510e8cca6230791102eef9961d895e8db54af0563c410488d618b988efd2511fc1f9c03f11c210808852b07fe46128c1a6b1155aa22cdf4b6802460ba593db2d11c7e6cbe19cedef76b7bcabd05d26fd97f4c5a59b225053aeffffffff0310270000000000001976a914a89733100315c37d228a529853af341a9d290a4588ac409c00000000000017a9142b56f9a4009d9ff99b8f97bea4455cd71135f5dd87409c00000000000017a9142b56f9a4009d9ff99b8f97bea4455cd71135f5dd8700000000' + self.assertEqual( + serialize(deserialize(tx)), + tx, + "Serialize roundtrip failed" + ) + + def test_serialize_script(self): + script = '47304402200c40fa58d3f6d5537a343cf9c8d13bc7470baf1d13867e0de3e535cd6b4354c802200f2b48f67494835b060d0b2ff85657d2ba2d9ea4e697888c8cb580e8658183a801483045022056f488c59849a4259e7cef70fe5d6d53a4bd1c59a195b0577bd81cb76044beca022100a735b319fa66af7b178fc719b93f905961ef4d4446deca8757a90de2106dd98a014cc95241046c7d87fd72caeab48e937f2feca9e9a4bd77f0eff4ebb2dbbb9855c023e334e188d32aaec4632ea4cbc575c037d8101aec73d029236e7b1c2380f3e4ad7edced41046fd41cddf3bbda33a240b417a825cc46555949917c7ccf64c59f42fd8dfe95f34fae3b09ed279c8c5b3530510e8cca6230791102eef9961d895e8db54af0563c410488d618b988efd2511fc1f9c03f11c210808852b07fe46128c1a6b1155aa22cdf4b6802460ba593db2d11c7e6cbe19cedef76b7bcabd05d26fd97f4c5a59b225053ae' + self.assertEqual( + serialize_script(deserialize_script(script)), + script, + "Script serialize roundtrip failed" + ) + + +class 
TestTransaction(unittest.TestCase): + @classmethod + def setUpClass(cls): + print("Attempting transaction creation") + + # FIXME: I don't know how to write this as a unit test. + # What should be asserted? + def test_all(self): + privs = [sha256(str(random.randrange(2**256))) for x in range(4)] + pubs = [privtopub(priv) for priv in privs] + addresses = [pubtoaddr(pub) for pub in pubs] + mscript = mk_multisig_script(pubs[1:], 2, 3) + msigaddr = p2sh_scriptaddr(mscript) + tx = mktx(['01'*32+':1', '23'*32+':2'], [msigaddr+':20202', addresses[0]+':40404']) + tx1 = sign(tx, 1, privs[0]) + + sig1 = multisign(tx, 0, mscript, privs[1]) + self.assertTrue(verify_tx_input(tx1, 0, mscript, sig1, pubs[1]), "Verification Error") + + sig3 = multisign(tx, 0, mscript, privs[3]) + self.assertTrue(verify_tx_input(tx1, 0, mscript, sig3, pubs[3]), "Verification Error") + + tx2 = apply_multisignatures(tx1, 0, mscript, [sig1, sig3]) + print("Outputting transaction: ", tx2) + + # https://github.com/vbuterin/pybitcointools/issues/71 + def test_multisig(self): + script = mk_multisig_script(["0254236f7d1124fc07600ad3eec5ac47393bf963fbf0608bcce255e685580d16d9", + "03560cad89031c412ad8619398bd43b3d673cb5bdcdac1afc46449382c6a8e0b2b"], + 2) + + self.assertEqual(p2sh_scriptaddr(script), "33byJBaS5N45RHFcatTSt9ZjiGb6nK4iV3") + + self.assertEqual(p2sh_scriptaddr(script, 0x05), "33byJBaS5N45RHFcatTSt9ZjiGb6nK4iV3") + self.assertEqual(p2sh_scriptaddr(script, 5), "33byJBaS5N45RHFcatTSt9ZjiGb6nK4iV3") + + self.assertEqual(p2sh_scriptaddr(script, 0xc4), "2MuABMvWTgpZRd4tAG25KW6YzvcoGVZDZYP") + self.assertEqual(p2sh_scriptaddr(script, 196), "2MuABMvWTgpZRd4tAG25KW6YzvcoGVZDZYP") + + +class TestDeterministicGenerate(unittest.TestCase): + @classmethod + def setUpClass(cls): + print("Beginning RFC6979 deterministic signing tests") + + def test_all(self): + # Created with python-ecdsa 0.9 + # Code to make your own vectors: + # class gen: + # def order(self): return 
115792089237316195423570985008687907852837564279074904382605163141518161494337 + # dummy = gen() + # for i in range(10): ecdsa.rfc6979.generate_k(dummy, i, hashlib.sha256, hashlib.sha256(str(i)).digest()) + test_vectors = [ + 32783320859482229023646250050688645858316445811207841524283044428614360139869, + 109592113955144883013243055602231029997040992035200230706187150761552110229971, + 65765393578006003630736298397268097590176526363988568884298609868706232621488, + 85563144787585457107933685459469453513056530050186673491900346620874099325918, + 99829559501561741463404068005537785834525504175465914981205926165214632019533, + 7755945018790142325513649272940177083855222863968691658328003977498047013576, + 81516639518483202269820502976089105897400159721845694286620077204726637043798, + 52824159213002398817852821148973968315579759063230697131029801896913602807019, + 44033460667645047622273556650595158811264350043302911918907282441675680538675, + 32396602643737403620316035551493791485834117358805817054817536312402837398361 + ] + + for i, ti in enumerate(test_vectors): + mine = deterministic_generate_k(bin_sha256(str(i)), encode(i, 256, 32)) + self.assertEqual( + ti, + mine, + "Test vector does not match. 
Details:\n%s\n%s" % ( + ti, + mine + ) + ) + + +class TestBIP0032(unittest.TestCase): + """See: https://en.bitcoin.it/wiki/BIP_0032""" + @classmethod + def setUpClass(cls): + print("Beginning BIP0032 tests") + + def _full_derive(self, key, chain): + if len(chain) == 0: + return key + elif chain[0] == 'pub': + return self._full_derive(bip32_privtopub(key), chain[1:]) + else: + return self._full_derive(bip32_ckd(key, chain[0]), chain[1:]) + + def test_all(self): + test_vectors = [ + [[], 'xprv9s21ZrQH143K3QTDL4LXw2F7HEK3wJUD2nW2nRk4stbPy6cq3jPPqjiChkVvvNKmPGJxWUtg6LnF5kejMRNNU3TGtRBeJgk33yuGBxrMPHi'], + [['pub'], 'xpub661MyMwAqRbcFtXgS5sYJABqqG9YLmC4Q1Rdap9gSE8NqtwybGhePY2gZ29ESFjqJoCu1Rupje8YtGqsefD265TMg7usUDFdp6W1EGMcet8'], + [[2**31], 'xprv9uHRZZhk6KAJC1avXpDAp4MDc3sQKNxDiPvvkX8Br5ngLNv1TxvUxt4cV1rGL5hj6KCesnDYUhd7oWgT11eZG7XnxHrnYeSvkzY7d2bhkJ7'], + [[2**31, 1], 'xprv9wTYmMFdV23N2TdNG573QoEsfRrWKQgWeibmLntzniatZvR9BmLnvSxqu53Kw1UmYPxLgboyZQaXwTCg8MSY3H2EU4pWcQDnRnrVA1xe8fs'], + [[2**31, 1, 2**31 + 2], 'xprv9z4pot5VBttmtdRTWfWQmoH1taj2axGVzFqSb8C9xaxKymcFzXBDptWmT7FwuEzG3ryjH4ktypQSAewRiNMjANTtpgP4mLTj34bhnZX7UiM'], + [[2**31, 1, 2**31 + 2, 'pub', 2, 1000000000], 'xpub6H1LXWLaKsWFhvm6RVpEL9P4KfRZSW7abD2ttkWP3SSQvnyA8FSVqNTEcYFgJS2UaFcxupHiYkro49S8yGasTvXEYBVPamhGW6cFJodrTHy'] + ] + + mk = bip32_master_key(safe_from_hex('000102030405060708090a0b0c0d0e0f')) + + for tv in test_vectors: + left, right = self._full_derive(mk, tv[0]), tv[1] + self.assertEqual( + left, + right, + "Test vector does not match. 
Details: \n%s\n%s\n\%s" % ( + tv[0], + [x.encode('hex') if isinstance(x, str) else x for x in bip32_deserialize(left)], + [x.encode('hex') if isinstance(x, str) else x for x in bip32_deserialize(right)], + ) + ) + + def test_all_testnet(self): + test_vectors = [ + [[], 'tprv8ZgxMBicQKsPeDgjzdC36fs6bMjGApWDNLR9erAXMs5skhMv36j9MV5ecvfavji5khqjWaWSFhN3YcCUUdiKH6isR4Pwy3U5y5egddBr16m'], + [['pub'], 'tpubD6NzVbkrYhZ4XgiXtGrdW5XDAPFCL9h7we1vwNCpn8tGbBcgfVYjXyhWo4E1xkh56hjod1RhGjxbaTLV3X4FyWuejifB9jusQ46QzG87VKp'], + [[2**31], 'tprv8bxNLu25VazNnppTCP4fyhyCvBHcYtzE3wr3cwYeL4HA7yf6TLGEUdS4QC1vLT63TkjRssqJe4CvGNEC8DzW5AoPUw56D1Ayg6HY4oy8QZ9'], + [[2**31, 1], 'tprv8e8VYgZxtHsSdGrtvdxYaSrryZGiYviWzGWtDDKTGh5NMXAEB8gYSCLHpFCywNs5uqV7ghRjimALQJkRFZnUrLHpzi2pGkwqLtbubgWuQ8q'], + [[2**31, 1, 2**31 + 2], 'tprv8gjmbDPpbAirVSezBEMuwSu1Ci9EpUJWKokZTYccSZSomNMLytWyLdtDNHRbucNaRJWWHANf9AzEdWVAqahfyRjVMKbNRhBmxAM8EJr7R15'], + [[2**31, 1, 2**31 + 2, 'pub', 2, 1000000000], 'tpubDHNy3kAG39ThyiwwsgoKY4iRenXDRtce8qdCFJZXPMCJg5dsCUHayp84raLTpvyiNA9sXPob5rgqkKvkN8S7MMyXbnEhGJMW64Cf4vFAoaF'] + ] + + mk = bip32_master_key(safe_from_hex('000102030405060708090a0b0c0d0e0f'), TESTNET_PRIVATE) + + for tv in test_vectors: + left, right = self._full_derive(mk, tv[0]), tv[1] + self.assertEqual( + left, + right, + "Test vector does not match. 
Details:\n%s\n%s\n%s\n\%s" % ( + left, + tv[0], + [x.encode('hex') if isinstance(x, str) else x for x in bip32_deserialize(left)], + [x.encode('hex') if isinstance(x, str) else x for x in bip32_deserialize(right)], + ) + ) + + def test_extra(self): + master = bip32_master_key(safe_from_hex("000102030405060708090a0b0c0d0e0f")) + + # m/0 + assert bip32_ckd(master, "0") == "xprv9uHRZZhbkedL37eZEnyrNsQPFZYRAvjy5rt6M1nbEkLSo378x1CQQLo2xxBvREwiK6kqf7GRNvsNEchwibzXaV6i5GcsgyjBeRguXhKsi4R" + assert bip32_privtopub(bip32_ckd(master, "0")) == "xpub68Gmy5EVb2BdFbj2LpWrk1M7obNuaPTpT5oh9QCCo5sRfqSHVYWex97WpDZzszdzHzxXDAzPLVSwybe4uPYkSk4G3gnrPqqkV9RyNzAcNJ1" + + # m/1 + assert bip32_ckd(master, "1") == "xprv9uHRZZhbkedL4yTpidDvuVfrdUkTbhDHviERRBkbzbNDZeMjWzqzKAdxWhzftGDSxDmBdakjqHiZJbkwiaTEXJdjZAaAjMZEE3PMbMrPJih" + assert bip32_privtopub(bip32_ckd(master, "1")) == "xpub68Gmy5EVb2BdHTYHpekwGdcbBWax19w9HwA2DaADYvuCSSgt4YAErxxSN1KWSnmyqkwRNbnTj3XiUBKmHeC8rTjLRPjSULcDKQQgfgJDppq" + + # m/0/0 + assert bip32_ckd(bip32_ckd(master, "0"), "0") == "xprv9ww7sMFLzJMzur2oEQDB642fbsMS4q6JRraMVTrM9bTWBq7NDS8ZpmsKVB4YF3mZecqax1fjnsPF19xnsJNfRp4RSyexacULXMKowSACTRc" + assert bip32_privtopub(bip32_ckd(bip32_ckd(master, "0"), "0")) == "xpub6AvUGrnEpfvJ8L7GLRkBTByQ9uBvUHp9o5VxHrFxhvzV4dSWkySpNaBoLR9FpbnwRmTa69yLHF3QfcaxbWT7gWdwws5k4dpmJvqpEuMWwnj" + + # m/0' + assert bip32_ckd(master, 2**31) == "xprv9uHRZZhk6KAJC1avXpDAp4MDc3sQKNxDiPvvkX8Br5ngLNv1TxvUxt4cV1rGL5hj6KCesnDYUhd7oWgT11eZG7XnxHrnYeSvkzY7d2bhkJ7" + assert bip32_privtopub(bip32_ckd(master, 2**31)) == "xpub68Gmy5EdvgibQVfPdqkBBCHxA5htiqg55crXYuXoQRKfDBFA1WEjWgP6LHhwBZeNK1VTsfTFUHCdrfp1bgwQ9xv5ski8PX9rL2dZXvgGDnw" + + # m/1' + assert bip32_ckd(master, 2**31 + 1) == "xprv9uHRZZhk6KAJFszJGW6LoUFq92uL7FvkBhmYiMurCWPHLJZkX2aGvNdRUBNnJu7nv36WnwCN59uNy6sxLDZvvNSgFz3TCCcKo7iutQzpg78" + assert bip32_privtopub(bip32_ckd(master, 2**31 + 1)) == 
"xpub68Gmy5EdvgibUN4mNXdMAcCZh4jpWiebYvh9WkKTkqvGD6tu4ZtXUAwuKSyF5DFZVmotf9UHFTGqSXo9qyDBSn47RkaN6Aedt9JbL7zcgSL" + + # m/1' + assert bip32_ckd(master, 1 + 2**31) == "xprv9uHRZZhk6KAJFszJGW6LoUFq92uL7FvkBhmYiMurCWPHLJZkX2aGvNdRUBNnJu7nv36WnwCN59uNy6sxLDZvvNSgFz3TCCcKo7iutQzpg78" + assert bip32_privtopub(bip32_ckd(master, 1 + 2**31)) == "xpub68Gmy5EdvgibUN4mNXdMAcCZh4jpWiebYvh9WkKTkqvGD6tu4ZtXUAwuKSyF5DFZVmotf9UHFTGqSXo9qyDBSn47RkaN6Aedt9JbL7zcgSL" + + # m/0'/0 + assert bip32_ckd(bip32_ckd(master, 2**31), "0") == "xprv9wTYmMFdV23N21MM6dLNavSQV7Sj7meSPXx6AV5eTdqqGLjycVjb115Ec5LgRAXscPZgy5G4jQ9csyyZLN3PZLxoM1h3BoPuEJzsgeypdKj" + assert bip32_privtopub(bip32_ckd(bip32_ckd(master, 2**31), "0")) == "xpub6ASuArnXKPbfEVRpCesNx4P939HDXENHkksgxsVG1yNp9958A33qYoPiTN9QrJmWFa2jNLdK84bWmyqTSPGtApP8P7nHUYwxHPhqmzUyeFG" + + # m/0'/0' + assert bip32_ckd(bip32_ckd(master, 2**31), 2**31) == "xprv9wTYmMFmpgaLB5Hge4YtaGqCKpsYPTD9vXWSsmdZrNU3Y2i4WoBykm6ZteeCLCCZpGxdHQuqEhM6Gdo2X6CVrQiTw6AAneF9WSkA9ewaxtS" + assert bip32_privtopub(bip32_ckd(bip32_ckd(master, 2**31), 2**31)) == "xpub6ASuArnff48dPZN9k65twQmvsri2nuw1HkS3gA3BQi12Qq3D4LWEJZR3jwCAr1NhsFMcQcBkmevmub6SLP37bNq91SEShXtEGUbX3GhNaGk" + + # m/44'/0'/0'/0/0 + assert bip32_ckd(bip32_ckd(bip32_ckd(bip32_ckd(bip32_ckd(master, 44 + 2**31), 2**31), 2**31), 0), 0) == "xprvA4A9CuBXhdBtCaLxwrw64Jaran4n1rgzeS5mjH47Ds8V67uZS8tTkG8jV3BZi83QqYXPcN4v8EjK2Aof4YcEeqLt688mV57gF4j6QZWdP9U" + assert bip32_privtopub(bip32_ckd(bip32_ckd(bip32_ckd(bip32_ckd(bip32_ckd(master, 44 + 2**31), 2**31), 2**31), 0), 0)) == "xpub6H9VcQiRXzkBR4RS3tU6RSXb8ouGRKQr1f1NXfTinCfTxvEhygCiJ4TDLHz1dyQ6d2Vz8Ne7eezkrViwaPo2ZMsNjVtFwvzsQXCDV6HJ3cV" + + +class TestStartingAddressAndScriptGenerationConsistency(unittest.TestCase): + @classmethod + def setUpClass(cls): + print("Starting address and script generation consistency tests") + + def test_all(self): + for i in range(5): + a = privtoaddr(random_key()) + self.assertEqual(a, script_to_address(address_to_script(a))) + 
self.assertEqual(a, script_to_address(address_to_script(a), 0)) + self.assertEqual(a, script_to_address(address_to_script(a), 0x00)) + + b = privtoaddr(random_key(), 5) + self.assertEqual(b, script_to_address(address_to_script(b))) + self.assertEqual(b, script_to_address(address_to_script(b), 0)) + self.assertEqual(b, script_to_address(address_to_script(b), 0x00)) + self.assertEqual(b, script_to_address(address_to_script(b), 5)) + self.assertEqual(b, script_to_address(address_to_script(b), 0x05)) + + + for i in range(5): + a = privtoaddr(random_key(), 0x6f) + self.assertEqual(a, script_to_address(address_to_script(a), 111)) + self.assertEqual(a, script_to_address(address_to_script(a), 0x6f)) + + b = privtoaddr(random_key(), 0xc4) + self.assertEqual(b, script_to_address(address_to_script(b), 111)) + self.assertEqual(b, script_to_address(address_to_script(b), 0x6f)) + self.assertEqual(b, script_to_address(address_to_script(b), 196)) + self.assertEqual(b, script_to_address(address_to_script(b), 0xc4)) + + +class TestRipeMD160PythonBackup(unittest.TestCase): + + @classmethod + def setUpClass(cls): + print('Testing the pure python backup for ripemd160') + + def test_all(self): + strvec = [ + '', + 'The quick brown fox jumps over the lazy dog', + 'The quick brown fox jumps over the lazy cog', + 'Nobody inspects the spammish repetition' + ] + + target = [ + '9c1185a5c5e9fc54612808977ee8f548b2258d31', + '37f332f68db77bd9d7edd4969571ad671cf9dd3b', + '132072df690933835eb8b6ad0b77e7b6f14acad7', + 'cc4a5ce1b3df48aec5d22d1f16b894a0b894eccc' + ] + + hash160target = [ + 'b472a266d0bd89c13706a4132ccfb16f7c3b9fcb', + '0e3397b4abc7a382b3ea2365883c3c7ca5f07600', + '53e0dacac5249e46114f65cb1f30d156b14e0bdc', + '1c9b7b48049a8f98699bca22a5856c5ef571cd68' + ] + + for i, s in enumerate(strvec): + digest = ripemd.RIPEMD160(s).digest() + hash160digest = ripemd.RIPEMD160(bin_sha256(s)).digest() + self.assertEqual(bytes_to_hex_string(digest), target[i]) + 
self.assertEqual(bytes_to_hex_string(hash160digest), hash160target[i]) + self.assertEqual(bytes_to_hex_string(bin_hash160(from_string_to_bytes(s))), hash160target[i]) + self.assertEqual(hash160(from_string_to_bytes(s)), hash160target[i]) + + +class TestScriptVsAddressOutputs(unittest.TestCase): + + @classmethod + def setUpClass(cls): + print('Testing script vs address outputs') + + def test_all(self): + addr0 = '1Lqgj1ThNfwLgHMp5qJUerYsuUEm8vHmVG' + script0 = '76a914d99f84267d1f90f3e870a5e9d2399918140be61d88ac' + addr1 = '31oSGBBNrpCiENH3XMZpiP6GTC4tad4bMy' + script1 = 'a9140136d001619faba572df2ef3d193a57ad29122d987' + + inputs = [{ + 'output': 'cd6219ea108119dc62fce09698b649efde56eca7ce223a3315e8b431f6280ce7:0', + 'value': 158000 + }] + + outputs = [ + [{'address': addr0, 'value': 1000}, {'address': addr1, 'value': 2000}], + [{'script': script0, 'value': 1000}, {'address': addr1, 'value': 2000}], + [{'address': addr0, 'value': 1000}, {'script': script1, 'value': 2000}], + [{'script': script0, 'value': 1000}, {'script': script1, 'value': 2000}], + [addr0 + ':1000', addr1 + ':2000'], + [script0 + ':1000', addr1 + ':2000'], + [addr0 + ':1000', script1 + ':2000'], + [script0 + ':1000', script1 + ':2000'] + ] + + for outs in outputs: + tx_struct = deserialize(mktx(inputs, outs)) + self.assertEqual(tx_struct['outs'], outputs[3]) + + +class TestConversions(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.privkey_hex = ( + "e9873d79c6d87dc0fb6a5778633389f4453213303da61f20bd67fc233aa33262" + ) + cls.privkey_bin = ( + b"\xe9\x87=y\xc6\xd8}\xc0\xfbjWxc3\x89\xf4E2\x130=\xa6\x1f \xbdg\xfc#:\xa32b" + ) + + cls.pubkey_hex = ( + "04588d202afcc1ee4ab5254c7847ec25b9a135bbda0f2bc69ee1a714749fd77dc9f88ff2a00d7e752d44cbe16e1ebcf0890b76ec7c78886109dee76ccfc8445424" + ) + cls.pubkey_bin = ( + b"\x04X\x8d *\xfc\xc1\xeeJ\xb5%LxG\xec%\xb9\xa15\xbb\xda\x0f+\xc6\x9e\xe1\xa7\x14t\x9f\xd7}\xc9\xf8\x8f\xf2\xa0\r~u-D\xcb\xe1n\x1e\xbc\xf0\x89\x0bv\xec|x\x88a\t\xde\xe7l\xcf\xc8DT$" 
+ ) + + def test_privkey_to_pubkey(self): + pubkey_hex = privkey_to_pubkey(self.privkey_hex) + self.assertEqual(pubkey_hex, self.pubkey_hex) + + def test_changebase(self): + self.assertEqual( + self.pubkey_bin, + changebase( + self.pubkey_hex, 16, 256, minlen=len(self.pubkey_bin) + ) + ) + + self.assertEqual( + self.pubkey_hex, + changebase( + self.pubkey_bin, 256, 16, minlen=len(self.pubkey_hex) + ) + ) + + self.assertEqual( + self.privkey_bin, + changebase( + self.privkey_hex, 16, 256, minlen=len(self.privkey_bin) + ) + ) + + self.assertEqual( + self.privkey_hex, + changebase( + self.privkey_bin, 256, 16, minlen=len(self.privkey_hex) + ) + ) + + +if __name__ == '__main__': + unittest.main() diff --git a/src/lib/pybitcointools/test_stealth.py b/src/lib/pybitcointools/test_stealth.py new file mode 100644 index 00000000..01a1432d --- /dev/null +++ b/src/lib/pybitcointools/test_stealth.py @@ -0,0 +1,92 @@ +import bitcoin as bc +import sys +import unittest + +class TestStealth(unittest.TestCase): + + def setUp(self): + + if sys.getrecursionlimit() < 1000: + sys.setrecursionlimit(1000) + + self.addr = 'vJmtjxSDxNPXL4RNapp9ARdqKz3uJyf1EDGjr1Fgqs9c8mYsVH82h8wvnA4i5rtJ57mr3kor1EVJrd4e5upACJd588xe52yXtzumxj' + self.scan_pub = '025e58a31122b38c86abc119b9379fe247410aee87a533f9c07b189aef6c3c1f52' + self.scan_priv = '3e49e7257cb31db997edb1cf8299af0f37e2663e2260e4b8033e49d39a6d02f2' + self.spend_pub = '03616562c98e7d7b74be409a787cec3a912122f3fb331a9bee9b0b73ce7b9f50af' + self.spend_priv = 'aa3db0cfb3edc94de4d10f873f8190843f2a17484f6021a95a7742302c744748' + self.ephem_pub = '03403d306ec35238384c7e340393335f9bc9bb4a2e574eb4e419452c4ea19f14b0' + self.ephem_priv = '9e63abaf8dcd5ea3919e6de0b6c544e00bf51bf92496113a01d6e369944dc091' + self.shared_secret = 'a4047ee231f4121e3a99a3a3378542e34a384b865a9917789920e1f13ffd91c6' + self.pay_pub = '02726112ad39cb6bf848b1b1ef30b88e35286bf99f746c2be575f96c0e02a9357c' + self.pay_priv = 
'4e422fb1e5e1db6c1f6ab32a7706d368ceb385e7fab098e633c5c5949c3b97cd' + + self.testnet_addr = 'waPUuLLykSnY3itzf1AyrQZm42F7KyB7SR5zpfqmnzPXWhx9kXLzV3EcyqzDdpTwngiyCCMUqztS9S1d7XJs3JMt3MsHPDpBCudvx9' + + def test_address_encoding(self): + + sc_pub, sp_pub = bc.basic_stealth_address_to_pubkeys(self.addr) + self.assertEqual(sc_pub, self.scan_pub) + self.assertEqual(sp_pub, self.spend_pub) + + stealth_addr2 = bc.pubkeys_to_basic_stealth_address(sc_pub, sp_pub) + self.assertEqual(stealth_addr2, self.addr) + + magic_byte_testnet = 43 + sc_pub, sp_pub = bc.basic_stealth_address_to_pubkeys(self.testnet_addr) + self.assertEqual(sc_pub, self.scan_pub) + self.assertEqual(sp_pub, self.spend_pub) + + stealth_addr2 = bc.pubkeys_to_basic_stealth_address(sc_pub, sp_pub, magic_byte_testnet) + self.assertEqual(stealth_addr2, self.testnet_addr) + + def test_shared_secret(self): + + sh_sec = bc.shared_secret_sender(self.scan_pub, self.ephem_priv) + self.assertEqual(sh_sec, self.shared_secret) + + sh_sec2 = bc.shared_secret_receiver(self.ephem_pub, self.scan_priv) + self.assertEqual(sh_sec2, self.shared_secret) + + def test_uncover_pay_keys(self): + + pub = bc.uncover_pay_pubkey_sender(self.scan_pub, self.spend_pub, self.ephem_priv) + pub2 = bc.uncover_pay_pubkey_receiver(self.scan_priv, self.spend_pub, self.ephem_pub) + self.assertEqual(pub, self.pay_pub) + self.assertEqual(pub2, self.pay_pub) + + priv = bc.uncover_pay_privkey(self.scan_priv, self.spend_priv, self.ephem_pub) + self.assertEqual(priv, self.pay_priv) + + def test_stealth_metadata_script(self): + + nonce = int('deadbeef', 16) + script = bc.mk_stealth_metadata_script(self.ephem_pub, nonce) + self.assertEqual(script[6:], 'deadbeef' + self.ephem_pub) + + eph_pub = bc.ephem_pubkey_from_tx_script(script) + self.assertEqual(eph_pub, self.ephem_pub) + + def test_stealth_tx_outputs(self): + + nonce = int('deadbeef', 16) + value = 10**8 + outputs = bc.mk_stealth_tx_outputs(self.addr, value, self.ephem_priv, nonce) + + 
self.assertEqual(outputs[0]['value'], 0) + self.assertEqual(outputs[0]['script'], '6a2606deadbeef' + self.ephem_pub) + self.assertEqual(outputs[1]['address'], bc.pubkey_to_address(self.pay_pub)) + self.assertEqual(outputs[1]['value'], value) + + outputs = bc.mk_stealth_tx_outputs(self.testnet_addr, value, self.ephem_priv, nonce, 'testnet') + + self.assertEqual(outputs[0]['value'], 0) + self.assertEqual(outputs[0]['script'], '6a2606deadbeef' + self.ephem_pub) + self.assertEqual(outputs[1]['address'], bc.pubkey_to_address(self.pay_pub, 111)) + self.assertEqual(outputs[1]['value'], value) + + self.assertRaises(Exception, bc.mk_stealth_tx_outputs, self.testnet_addr, value, self.ephem_priv, nonce, 'btc') + + self.assertRaises(Exception, bc.mk_stealth_tx_outputs, self.addr, value, self.ephem_priv, nonce, 'testnet') + +if __name__ == '__main__': + unittest.main() diff --git a/COPYING b/src/lib/pyelliptic/LICENSE similarity index 99% rename from COPYING rename to src/lib/pyelliptic/LICENSE index f288702d..94a9ed02 100644 --- a/COPYING +++ b/src/lib/pyelliptic/LICENSE @@ -1,7 +1,7 @@ GNU GENERAL PUBLIC LICENSE Version 3, 29 June 2007 - Copyright (C) 2007 Free Software Foundation, Inc. + Copyright (C) 2007 Free Software Foundation, Inc. Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. @@ -645,7 +645,7 @@ the "copyright" line and a pointer to where the full notice is found. GNU General Public License for more details. You should have received a copy of the GNU General Public License - along with this program. If not, see . + along with this program. If not, see . Also add information on how to contact you by electronic and paper mail. @@ -664,11 +664,11 @@ might be different; for a GUI interface, you would use an "about box". You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. 
For more information on this, and how to apply and follow the GNU GPL, see -. +. The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read -. +. diff --git a/src/lib/pyelliptic/README.md b/src/lib/pyelliptic/README.md new file mode 100644 index 00000000..587b1445 --- /dev/null +++ b/src/lib/pyelliptic/README.md @@ -0,0 +1,67 @@ +# PyElliptic + +PyElliptic is a high level wrapper for the cryptographic library : OpenSSL. +Under the GNU General Public License + +Python3 compatible. For GNU/Linux and Windows. +Require OpenSSL + +## Features + +### Asymmetric cryptography using Elliptic Curve Cryptography (ECC) + +* Key agreement : ECDH +* Digital signatures : ECDSA +* Hybrid encryption : ECIES (like RSA) + +### Symmetric cryptography + +* AES-128 (CBC, OFB, CFB) +* AES-256 (CBC, OFB, CFB) +* Blowfish (CFB and CBC) +* RC4 + +### Other + +* CSPRNG +* HMAC (using SHA512) +* PBKDF2 (SHA256 and SHA512) + +## Example + +```python +#!/usr/bin/python + +import pyelliptic + +# Symmetric encryption +iv = pyelliptic.Cipher.gen_IV('aes-256-cfb') +ctx = pyelliptic.Cipher("secretkey", iv, 1, ciphername='aes-256-cfb') + +ciphertext = ctx.update('test1') +ciphertext += ctx.update('test2') +ciphertext += ctx.final() + +ctx2 = pyelliptic.Cipher("secretkey", iv, 0, ciphername='aes-256-cfb') +print ctx2.ciphering(ciphertext) + +# Asymmetric encryption +alice = pyelliptic.ECC() # default curve: sect283r1 +bob = pyelliptic.ECC(curve='sect571r1') + +ciphertext = alice.encrypt("Hello Bob", bob.get_pubkey()) +print bob.decrypt(ciphertext) + +signature = bob.sign("Hello Alice") +# alice's job : +print pyelliptic.ECC(pubkey=bob.get_pubkey()).verify(signature, "Hello Alice") + +# ERROR !!! 
+try: + key = alice.get_ecdh_key(bob.get_pubkey()) +except: print("For ECDH key agreement, the keys must be defined on the same curve !") + +alice = pyelliptic.ECC(curve='sect571r1') +print alice.get_ecdh_key(bob.get_pubkey()).encode('hex') +print bob.get_ecdh_key(alice.get_pubkey()).encode('hex') +``` diff --git a/src/lib/pyelliptic/__init__.py b/src/lib/pyelliptic/__init__.py new file mode 100644 index 00000000..761d08af --- /dev/null +++ b/src/lib/pyelliptic/__init__.py @@ -0,0 +1,19 @@ +# Copyright (C) 2010 +# Author: Yann GUIBET +# Contact: + +__version__ = '1.3' + +__all__ = [ + 'OpenSSL', + 'ECC', + 'Cipher', + 'hmac_sha256', + 'hmac_sha512', + 'pbkdf2' +] + +from .openssl import OpenSSL +from .ecc import ECC +from .cipher import Cipher +from .hash import hmac_sha256, hmac_sha512, pbkdf2 diff --git a/src/lib/pyelliptic/arithmetic.py b/src/lib/pyelliptic/arithmetic.py new file mode 100644 index 00000000..1eec381a --- /dev/null +++ b/src/lib/pyelliptic/arithmetic.py @@ -0,0 +1,106 @@ +import hashlib, re + +P = 2**256-2**32-2**9-2**8-2**7-2**6-2**4-1 +A = 0 +Gx = 55066263022277343669578718895168534326250603453777594175500187360389116729240 +Gy = 32670510020758816978083085130507043184471273380659243275938904335757337482424 +G = (Gx,Gy) + +def inv(a,n): + lm, hm = 1,0 + low, high = a%n,n + while low > 1: + r = high/low + nm, new = hm-lm*r, high-low*r + lm, low, hm, high = nm, new, lm, low + return lm % n + +def get_code_string(base): + if base == 2: return '01' + elif base == 10: return '0123456789' + elif base == 16: return "0123456789abcdef" + elif base == 58: return "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz" + elif base == 256: return ''.join([chr(x) for x in range(256)]) + else: raise ValueError("Invalid base!") + +def encode(val,base,minlen=0): + code_string = get_code_string(base) + result = "" + while val > 0: + result = code_string[val % base] + result + val /= base + if len(result) < minlen: + result = 
code_string[0]*(minlen-len(result))+result + return result + +def decode(string,base): + code_string = get_code_string(base) + result = 0 + if base == 16: string = string.lower() + while len(string) > 0: + result *= base + result += code_string.find(string[0]) + string = string[1:] + return result + +def changebase(string,frm,to,minlen=0): + return encode(decode(string,frm),to,minlen) + +def base10_add(a,b): + if a == None: return b[0],b[1] + if b == None: return a[0],a[1] + if a[0] == b[0]: + if a[1] == b[1]: return base10_double(a[0],a[1]) + else: return None + m = ((b[1]-a[1]) * inv(b[0]-a[0],P)) % P + x = (m*m-a[0]-b[0]) % P + y = (m*(a[0]-x)-a[1]) % P + return (x,y) + +def base10_double(a): + if a == None: return None + m = ((3*a[0]*a[0]+A)*inv(2*a[1],P)) % P + x = (m*m-2*a[0]) % P + y = (m*(a[0]-x)-a[1]) % P + return (x,y) + +def base10_multiply(a,n): + if n == 0: return G + if n == 1: return a + if (n%2) == 0: return base10_double(base10_multiply(a,n/2)) + if (n%2) == 1: return base10_add(base10_double(base10_multiply(a,n/2)),a) + +def hex_to_point(h): return (decode(h[2:66],16),decode(h[66:],16)) + +def point_to_hex(p): return '04'+encode(p[0],16,64)+encode(p[1],16,64) + +def multiply(privkey,pubkey): + return point_to_hex(base10_multiply(hex_to_point(pubkey),decode(privkey,16))) + +def privtopub(privkey): + return point_to_hex(base10_multiply(G,decode(privkey,16))) + +def add(p1,p2): + if (len(p1)==32): + return encode(decode(p1,16) + decode(p2,16) % P,16,32) + else: + return point_to_hex(base10_add(hex_to_point(p1),hex_to_point(p2))) + +def hash_160(string): + intermed = hashlib.sha256(string).digest() + ripemd160 = hashlib.new('ripemd160') + ripemd160.update(intermed) + return ripemd160.digest() + +def dbl_sha256(string): + return hashlib.sha256(hashlib.sha256(string).digest()).digest() + +def bin_to_b58check(inp): + inp_fmtd = '\x00' + inp + leadingzbytes = len(re.match('^\x00*',inp_fmtd).group(0)) + checksum = dbl_sha256(inp_fmtd)[:4] + return '1' * 
leadingzbytes + changebase(inp_fmtd+checksum,256,58) + +#Convert a public key (in hex) to a Bitcoin address +def pubkey_to_address(pubkey): + return bin_to_b58check(hash_160(changebase(pubkey,16,256))) diff --git a/src/lib/pyelliptic/cipher.py b/src/lib/pyelliptic/cipher.py new file mode 100644 index 00000000..4a76a344 --- /dev/null +++ b/src/lib/pyelliptic/cipher.py @@ -0,0 +1,81 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (C) 2011 Yann GUIBET +# See LICENSE for details. + +from .openssl import OpenSSL + + +class Cipher: + """ + Symmetric encryption + + import pyelliptic + iv = pyelliptic.Cipher.gen_IV('aes-256-cfb') + ctx = pyelliptic.Cipher("secretkey", iv, 1, ciphername='aes-256-cfb') + ciphertext = ctx.update('test1') + ciphertext += ctx.update('test2') + ciphertext += ctx.final() + + ctx2 = pyelliptic.Cipher("secretkey", iv, 0, ciphername='aes-256-cfb') + print ctx2.ciphering(ciphertext) + """ + def __init__(self, key, iv, do, ciphername='aes-256-cbc'): + """ + do == 1 => Encrypt; do == 0 => Decrypt + """ + self.cipher = OpenSSL.get_cipher(ciphername) + self.ctx = OpenSSL.EVP_CIPHER_CTX_new() + if do == 1 or do == 0: + k = OpenSSL.malloc(key, len(key)) + IV = OpenSSL.malloc(iv, len(iv)) + OpenSSL.EVP_CipherInit_ex( + self.ctx, self.cipher.get_pointer(), 0, k, IV, do) + else: + raise Exception("RTFM ...") + + @staticmethod + def get_all_cipher(): + """ + static method, returns all ciphers available + """ + return OpenSSL.cipher_algo.keys() + + @staticmethod + def get_blocksize(ciphername): + cipher = OpenSSL.get_cipher(ciphername) + return cipher.get_blocksize() + + @staticmethod + def gen_IV(ciphername): + cipher = OpenSSL.get_cipher(ciphername) + return OpenSSL.rand(cipher.get_blocksize()) + + def update(self, input): + i = OpenSSL.c_int(0) + buffer = OpenSSL.malloc(b"", len(input) + self.cipher.get_blocksize()) + inp = OpenSSL.malloc(input, len(input)) + if OpenSSL.EVP_CipherUpdate(self.ctx, OpenSSL.byref(buffer), + OpenSSL.byref(i), 
inp, len(input)) == 0: + raise Exception("[OpenSSL] EVP_CipherUpdate FAIL ...") + return buffer.raw[0:i.value] + + def final(self): + i = OpenSSL.c_int(0) + buffer = OpenSSL.malloc(b"", self.cipher.get_blocksize()) + if (OpenSSL.EVP_CipherFinal_ex(self.ctx, OpenSSL.byref(buffer), + OpenSSL.byref(i))) == 0: + raise Exception("[OpenSSL] EVP_CipherFinal_ex FAIL ...") + return buffer.raw[0:i.value] + + def ciphering(self, input): + """ + Do update and final in one method + """ + buff = self.update(input) + return buff + self.final() + + def __del__(self): + OpenSSL.EVP_CIPHER_CTX_cleanup(self.ctx) + OpenSSL.EVP_CIPHER_CTX_free(self.ctx) diff --git a/src/lib/pyelliptic/ecc.py b/src/lib/pyelliptic/ecc.py new file mode 100644 index 00000000..b36806df --- /dev/null +++ b/src/lib/pyelliptic/ecc.py @@ -0,0 +1,460 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (C) 2011 Yann GUIBET +# See LICENSE for details. + +from hashlib import sha512 +from .openssl import OpenSSL +from .cipher import Cipher +from .hash import hmac_sha256, equals +from struct import pack, unpack + + +class ECC: + """ + Asymmetric encryption with Elliptic Curve Cryptography (ECC) + ECDH, ECDSA and ECIES + + import pyelliptic + + alice = pyelliptic.ECC() # default curve: sect283r1 + bob = pyelliptic.ECC(curve='sect571r1') + + ciphertext = alice.encrypt("Hello Bob", bob.get_pubkey()) + print bob.decrypt(ciphertext) + + signature = bob.sign("Hello Alice") + # alice's job : + print pyelliptic.ECC( + pubkey=bob.get_pubkey()).verify(signature, "Hello Alice") + + # ERROR !!! 
+ try: + key = alice.get_ecdh_key(bob.get_pubkey()) + except: print("For ECDH key agreement,\ + the keys must be defined on the same curve !") + + alice = pyelliptic.ECC(curve='sect571r1') + print alice.get_ecdh_key(bob.get_pubkey()).encode('hex') + print bob.get_ecdh_key(alice.get_pubkey()).encode('hex') + + """ + def __init__(self, pubkey=None, privkey=None, pubkey_x=None, + pubkey_y=None, raw_privkey=None, curve='sect283r1'): + """ + For a normal and High level use, specifie pubkey, + privkey (if you need) and the curve + """ + if type(curve) == str: + self.curve = OpenSSL.get_curve(curve) + else: + self.curve = curve + + if pubkey_x is not None and pubkey_y is not None: + self._set_keys(pubkey_x, pubkey_y, raw_privkey) + elif pubkey is not None: + curve, pubkey_x, pubkey_y, i = ECC._decode_pubkey(pubkey) + if privkey is not None: + curve2, raw_privkey, i = ECC._decode_privkey(privkey) + if curve != curve2: + raise Exception("Bad ECC keys ...") + self.curve = curve + self._set_keys(pubkey_x, pubkey_y, raw_privkey) + else: + self.privkey, self.pubkey_x, self.pubkey_y = self._generate() + + def _set_keys(self, pubkey_x, pubkey_y, privkey): + if self.raw_check_key(privkey, pubkey_x, pubkey_y) < 0: + self.pubkey_x = None + self.pubkey_y = None + self.privkey = None + raise Exception("Bad ECC keys ...") + else: + self.pubkey_x = pubkey_x + self.pubkey_y = pubkey_y + self.privkey = privkey + + @staticmethod + def get_curves(): + """ + static method, returns the list of all the curves available + """ + return OpenSSL.curves.keys() + + def get_curve(self): + return OpenSSL.get_curve_by_id(self.curve) + + def get_curve_id(self): + return self.curve + + def get_pubkey(self): + """ + High level function which returns : + curve(2) + len_of_pubkeyX(2) + pubkeyX + len_of_pubkeyY + pubkeyY + """ + return b''.join((pack('!H', self.curve), + pack('!H', len(self.pubkey_x)), + self.pubkey_x, + pack('!H', len(self.pubkey_y)), + self.pubkey_y + )) + + def get_privkey(self): + """ + 
High level function which returns + curve(2) + len_of_privkey(2) + privkey + """ + return b''.join((pack('!H', self.curve), + pack('!H', len(self.privkey)), + self.privkey + )) + + @staticmethod + def _decode_pubkey(pubkey): + i = 0 + curve = unpack('!H', pubkey[i:i + 2])[0] + i += 2 + tmplen = unpack('!H', pubkey[i:i + 2])[0] + i += 2 + pubkey_x = pubkey[i:i + tmplen] + i += tmplen + tmplen = unpack('!H', pubkey[i:i + 2])[0] + i += 2 + pubkey_y = pubkey[i:i + tmplen] + i += tmplen + return curve, pubkey_x, pubkey_y, i + + @staticmethod + def _decode_privkey(privkey): + i = 0 + curve = unpack('!H', privkey[i:i + 2])[0] + i += 2 + tmplen = unpack('!H', privkey[i:i + 2])[0] + i += 2 + privkey = privkey[i:i + tmplen] + i += tmplen + return curve, privkey, i + + def _generate(self): + try: + pub_key_x = OpenSSL.BN_new() + pub_key_y = OpenSSL.BN_new() + + key = OpenSSL.EC_KEY_new_by_curve_name(self.curve) + if key == 0: + raise Exception("[OpenSSL] EC_KEY_new_by_curve_name FAIL ...") + if (OpenSSL.EC_KEY_generate_key(key)) == 0: + raise Exception("[OpenSSL] EC_KEY_generate_key FAIL ...") + if (OpenSSL.EC_KEY_check_key(key)) == 0: + raise Exception("[OpenSSL] EC_KEY_check_key FAIL ...") + priv_key = OpenSSL.EC_KEY_get0_private_key(key) + + group = OpenSSL.EC_KEY_get0_group(key) + pub_key = OpenSSL.EC_KEY_get0_public_key(key) + + if (OpenSSL.EC_POINT_get_affine_coordinates_GFp(group, pub_key, + pub_key_x, + pub_key_y, 0 + )) == 0: + raise Exception( + "[OpenSSL] EC_POINT_get_affine_coordinates_GFp FAIL ...") + + privkey = OpenSSL.malloc(0, OpenSSL.BN_num_bytes(priv_key)) + pubkeyx = OpenSSL.malloc(0, OpenSSL.BN_num_bytes(pub_key_x)) + pubkeyy = OpenSSL.malloc(0, OpenSSL.BN_num_bytes(pub_key_y)) + OpenSSL.BN_bn2bin(priv_key, privkey) + privkey = privkey.raw + OpenSSL.BN_bn2bin(pub_key_x, pubkeyx) + pubkeyx = pubkeyx.raw + OpenSSL.BN_bn2bin(pub_key_y, pubkeyy) + pubkeyy = pubkeyy.raw + self.raw_check_key(privkey, pubkeyx, pubkeyy) + + return privkey, pubkeyx, pubkeyy + + 
finally: + OpenSSL.EC_KEY_free(key) + OpenSSL.BN_free(pub_key_x) + OpenSSL.BN_free(pub_key_y) + + def get_ecdh_key(self, pubkey): + """ + High level function. Compute public key with the local private key + and returns a 512bits shared key + """ + curve, pubkey_x, pubkey_y, i = ECC._decode_pubkey(pubkey) + if curve != self.curve: + raise Exception("ECC keys must be from the same curve !") + return sha512(self.raw_get_ecdh_key(pubkey_x, pubkey_y)).digest() + + def raw_get_ecdh_key(self, pubkey_x, pubkey_y): + try: + ecdh_keybuffer = OpenSSL.malloc(0, 32) + + other_key = OpenSSL.EC_KEY_new_by_curve_name(self.curve) + if other_key == 0: + raise Exception("[OpenSSL] EC_KEY_new_by_curve_name FAIL ...") + + other_pub_key_x = OpenSSL.BN_bin2bn(pubkey_x, len(pubkey_x), 0) + other_pub_key_y = OpenSSL.BN_bin2bn(pubkey_y, len(pubkey_y), 0) + + other_group = OpenSSL.EC_KEY_get0_group(other_key) + other_pub_key = OpenSSL.EC_POINT_new(other_group) + + if (OpenSSL.EC_POINT_set_affine_coordinates_GFp(other_group, + other_pub_key, + other_pub_key_x, + other_pub_key_y, + 0)) == 0: + raise Exception( + "[OpenSSL] EC_POINT_set_affine_coordinates_GFp FAIL ...") + if (OpenSSL.EC_KEY_set_public_key(other_key, other_pub_key)) == 0: + raise Exception("[OpenSSL] EC_KEY_set_public_key FAIL ...") + if (OpenSSL.EC_KEY_check_key(other_key)) == 0: + raise Exception("[OpenSSL] EC_KEY_check_key FAIL ...") + + own_key = OpenSSL.EC_KEY_new_by_curve_name(self.curve) + if own_key == 0: + raise Exception("[OpenSSL] EC_KEY_new_by_curve_name FAIL ...") + own_priv_key = OpenSSL.BN_bin2bn( + self.privkey, len(self.privkey), 0) + + if (OpenSSL.EC_KEY_set_private_key(own_key, own_priv_key)) == 0: + raise Exception("[OpenSSL] EC_KEY_set_private_key FAIL ...") + + OpenSSL.ECDH_set_method(own_key, OpenSSL.ECDH_OpenSSL()) + ecdh_keylen = OpenSSL.ECDH_compute_key( + ecdh_keybuffer, 32, other_pub_key, own_key, 0) + + if ecdh_keylen != 32: + raise Exception("[OpenSSL] ECDH keylen FAIL ...") + + return 
ecdh_keybuffer.raw + + finally: + OpenSSL.EC_KEY_free(other_key) + OpenSSL.BN_free(other_pub_key_x) + OpenSSL.BN_free(other_pub_key_y) + OpenSSL.EC_POINT_free(other_pub_key) + OpenSSL.EC_KEY_free(own_key) + OpenSSL.BN_free(own_priv_key) + + def check_key(self, privkey, pubkey): + """ + Check the public key and the private key. + The private key is optional (replace by None) + """ + curve, pubkey_x, pubkey_y, i = ECC._decode_pubkey(pubkey) + if privkey is None: + raw_privkey = None + curve2 = curve + else: + curve2, raw_privkey, i = ECC._decode_privkey(privkey) + if curve != curve2: + raise Exception("Bad public and private key") + return self.raw_check_key(raw_privkey, pubkey_x, pubkey_y, curve) + + def raw_check_key(self, privkey, pubkey_x, pubkey_y, curve=None): + if curve is None: + curve = self.curve + elif type(curve) == str: + curve = OpenSSL.get_curve(curve) + else: + curve = curve + try: + key = OpenSSL.EC_KEY_new_by_curve_name(curve) + if key == 0: + raise Exception("[OpenSSL] EC_KEY_new_by_curve_name FAIL ...") + if privkey is not None: + priv_key = OpenSSL.BN_bin2bn(privkey, len(privkey), 0) + pub_key_x = OpenSSL.BN_bin2bn(pubkey_x, len(pubkey_x), 0) + pub_key_y = OpenSSL.BN_bin2bn(pubkey_y, len(pubkey_y), 0) + + if privkey is not None: + if (OpenSSL.EC_KEY_set_private_key(key, priv_key)) == 0: + raise Exception( + "[OpenSSL] EC_KEY_set_private_key FAIL ...") + + group = OpenSSL.EC_KEY_get0_group(key) + pub_key = OpenSSL.EC_POINT_new(group) + + if (OpenSSL.EC_POINT_set_affine_coordinates_GFp(group, pub_key, + pub_key_x, + pub_key_y, + 0)) == 0: + raise Exception( + "[OpenSSL] EC_POINT_set_affine_coordinates_GFp FAIL ...") + if (OpenSSL.EC_KEY_set_public_key(key, pub_key)) == 0: + raise Exception("[OpenSSL] EC_KEY_set_public_key FAIL ...") + if (OpenSSL.EC_KEY_check_key(key)) == 0: + raise Exception("[OpenSSL] EC_KEY_check_key FAIL ...") + return 0 + + finally: + OpenSSL.EC_KEY_free(key) + OpenSSL.BN_free(pub_key_x) + OpenSSL.BN_free(pub_key_y) + 
OpenSSL.EC_POINT_free(pub_key) + if privkey is not None: + OpenSSL.BN_free(priv_key) + + def sign(self, inputb, digest_alg=OpenSSL.EVP_ecdsa): + """ + Sign the input with ECDSA method and returns the signature + """ + try: + size = len(inputb) + buff = OpenSSL.malloc(inputb, size) + digest = OpenSSL.malloc(0, 64) + md_ctx = OpenSSL.EVP_MD_CTX_create() + dgst_len = OpenSSL.pointer(OpenSSL.c_int(0)) + siglen = OpenSSL.pointer(OpenSSL.c_int(0)) + sig = OpenSSL.malloc(0, 151) + + key = OpenSSL.EC_KEY_new_by_curve_name(self.curve) + if key == 0: + raise Exception("[OpenSSL] EC_KEY_new_by_curve_name FAIL ...") + + priv_key = OpenSSL.BN_bin2bn(self.privkey, len(self.privkey), 0) + pub_key_x = OpenSSL.BN_bin2bn(self.pubkey_x, len(self.pubkey_x), 0) + pub_key_y = OpenSSL.BN_bin2bn(self.pubkey_y, len(self.pubkey_y), 0) + + if (OpenSSL.EC_KEY_set_private_key(key, priv_key)) == 0: + raise Exception("[OpenSSL] EC_KEY_set_private_key FAIL ...") + + group = OpenSSL.EC_KEY_get0_group(key) + pub_key = OpenSSL.EC_POINT_new(group) + + if (OpenSSL.EC_POINT_set_affine_coordinates_GFp(group, pub_key, + pub_key_x, + pub_key_y, + 0)) == 0: + raise Exception( + "[OpenSSL] EC_POINT_set_affine_coordinates_GFp FAIL ...") + if (OpenSSL.EC_KEY_set_public_key(key, pub_key)) == 0: + raise Exception("[OpenSSL] EC_KEY_set_public_key FAIL ...") + if (OpenSSL.EC_KEY_check_key(key)) == 0: + raise Exception("[OpenSSL] EC_KEY_check_key FAIL ...") + + OpenSSL.EVP_MD_CTX_init(md_ctx) + OpenSSL.EVP_DigestInit_ex(md_ctx, digest_alg(), None) + + if (OpenSSL.EVP_DigestUpdate(md_ctx, buff, size)) == 0: + raise Exception("[OpenSSL] EVP_DigestUpdate FAIL ...") + OpenSSL.EVP_DigestFinal_ex(md_ctx, digest, dgst_len) + OpenSSL.ECDSA_sign(0, digest, dgst_len.contents, sig, siglen, key) + if (OpenSSL.ECDSA_verify(0, digest, dgst_len.contents, sig, + siglen.contents, key)) != 1: + raise Exception("[OpenSSL] ECDSA_verify FAIL ...") + + return sig.raw[:siglen.contents.value] + + finally: + OpenSSL.EC_KEY_free(key) + 
OpenSSL.BN_free(pub_key_x) + OpenSSL.BN_free(pub_key_y) + OpenSSL.BN_free(priv_key) + OpenSSL.EC_POINT_free(pub_key) + OpenSSL.EVP_MD_CTX_destroy(md_ctx) + + def verify(self, sig, inputb, digest_alg=OpenSSL.EVP_ecdsa): + """ + Verify the signature with the input and the local public key. + Returns a boolean + """ + try: + bsig = OpenSSL.malloc(sig, len(sig)) + binputb = OpenSSL.malloc(inputb, len(inputb)) + digest = OpenSSL.malloc(0, 64) + dgst_len = OpenSSL.pointer(OpenSSL.c_int(0)) + md_ctx = OpenSSL.EVP_MD_CTX_create() + + key = OpenSSL.EC_KEY_new_by_curve_name(self.curve) + + if key == 0: + raise Exception("[OpenSSL] EC_KEY_new_by_curve_name FAIL ...") + + pub_key_x = OpenSSL.BN_bin2bn(self.pubkey_x, len(self.pubkey_x), 0) + pub_key_y = OpenSSL.BN_bin2bn(self.pubkey_y, len(self.pubkey_y), 0) + group = OpenSSL.EC_KEY_get0_group(key) + pub_key = OpenSSL.EC_POINT_new(group) + + if (OpenSSL.EC_POINT_set_affine_coordinates_GFp(group, pub_key, + pub_key_x, + pub_key_y, + 0)) == 0: + raise Exception( + "[OpenSSL] EC_POINT_set_affine_coordinates_GFp FAIL ...") + if (OpenSSL.EC_KEY_set_public_key(key, pub_key)) == 0: + raise Exception("[OpenSSL] EC_KEY_set_public_key FAIL ...") + if (OpenSSL.EC_KEY_check_key(key)) == 0: + raise Exception("[OpenSSL] EC_KEY_check_key FAIL ...") + + OpenSSL.EVP_MD_CTX_init(md_ctx) + OpenSSL.EVP_DigestInit_ex(md_ctx, digest_alg(), None) + if (OpenSSL.EVP_DigestUpdate(md_ctx, binputb, len(inputb))) == 0: + raise Exception("[OpenSSL] EVP_DigestUpdate FAIL ...") + + OpenSSL.EVP_DigestFinal_ex(md_ctx, digest, dgst_len) + ret = OpenSSL.ECDSA_verify( + 0, digest, dgst_len.contents, bsig, len(sig), key) + + if ret == -1: + return False # Fail to Check + else: + if ret == 0: + return False # Bad signature ! 
+ else: + return True # Good + return False + + finally: + OpenSSL.EC_KEY_free(key) + OpenSSL.BN_free(pub_key_x) + OpenSSL.BN_free(pub_key_y) + OpenSSL.EC_POINT_free(pub_key) + OpenSSL.EVP_MD_CTX_destroy(md_ctx) + + @staticmethod + def encrypt(data, pubkey, ephemcurve=None, ciphername='aes-256-cbc'): + """ + Encrypt data with ECIES method using the public key of the recipient. + """ + curve, pubkey_x, pubkey_y, i = ECC._decode_pubkey(pubkey) + return ECC.raw_encrypt(data, pubkey_x, pubkey_y, curve=curve, + ephemcurve=ephemcurve, ciphername=ciphername) + + @staticmethod + def raw_encrypt(data, pubkey_x, pubkey_y, curve='sect283r1', + ephemcurve=None, ciphername='aes-256-cbc'): + if ephemcurve is None: + ephemcurve = curve + ephem = ECC(curve=ephemcurve) + key = sha512(ephem.raw_get_ecdh_key(pubkey_x, pubkey_y)).digest() + key_e, key_m = key[:32], key[32:] + pubkey = ephem.get_pubkey() + iv = OpenSSL.rand(OpenSSL.get_cipher(ciphername).get_blocksize()) + ctx = Cipher(key_e, iv, 1, ciphername) + ciphertext = iv + pubkey + ctx.ciphering(data) + mac = hmac_sha256(key_m, ciphertext) + return ciphertext + mac + + def decrypt(self, data, ciphername='aes-256-cbc'): + """ + Decrypt data with ECIES method using the local private key + """ + blocksize = OpenSSL.get_cipher(ciphername).get_blocksize() + iv = data[:blocksize] + i = blocksize + curve, pubkey_x, pubkey_y, i2 = ECC._decode_pubkey(data[i:]) + i += i2 + ciphertext = data[i:len(data)-32] + i += len(ciphertext) + mac = data[i:] + key = sha512(self.raw_get_ecdh_key(pubkey_x, pubkey_y)).digest() + key_e, key_m = key[:32], key[32:] + if not equals(hmac_sha256(key_m, data[:len(data) - 32]), mac): + raise RuntimeError("Fail to verify data") + ctx = Cipher(key_e, iv, 0, ciphername) + return ctx.ciphering(ciphertext) diff --git a/src/lib/pyelliptic/hash.py b/src/lib/pyelliptic/hash.py new file mode 100644 index 00000000..d6a15811 --- /dev/null +++ b/src/lib/pyelliptic/hash.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python +# -*- 
coding: utf-8 -*- + +# Copyright (C) 2011 Yann GUIBET +# See LICENSE for details. + +from .openssl import OpenSSL + + +# For python3 +def _equals_bytes(a, b): + if len(a) != len(b): + return False + result = 0 + for x, y in zip(a, b): + result |= x ^ y + return result == 0 + + +def _equals_str(a, b): + if len(a) != len(b): + return False + result = 0 + for x, y in zip(a, b): + result |= ord(x) ^ ord(y) + return result == 0 + + +def equals(a, b): + if isinstance(a, str): + return _equals_str(a, b) + else: + return _equals_bytes(a, b) + + +def hmac_sha256(k, m): + """ + Compute the key and the message with HMAC SHA5256 + """ + key = OpenSSL.malloc(k, len(k)) + d = OpenSSL.malloc(m, len(m)) + md = OpenSSL.malloc(0, 32) + i = OpenSSL.pointer(OpenSSL.c_int(0)) + OpenSSL.HMAC(OpenSSL.EVP_sha256(), key, len(k), d, len(m), md, i) + return md.raw + + +def hmac_sha512(k, m): + """ + Compute the key and the message with HMAC SHA512 + """ + key = OpenSSL.malloc(k, len(k)) + d = OpenSSL.malloc(m, len(m)) + md = OpenSSL.malloc(0, 64) + i = OpenSSL.pointer(OpenSSL.c_int(0)) + OpenSSL.HMAC(OpenSSL.EVP_sha512(), key, len(k), d, len(m), md, i) + return md.raw + + +def pbkdf2(password, salt=None, i=10000, keylen=64): + if salt is None: + salt = OpenSSL.rand(8) + p_password = OpenSSL.malloc(password, len(password)) + p_salt = OpenSSL.malloc(salt, len(salt)) + output = OpenSSL.malloc(0, keylen) + OpenSSL.PKCS5_PBKDF2_HMAC(p_password, len(password), p_salt, + len(p_salt), i, OpenSSL.EVP_sha256(), + keylen, output) + return salt, output.raw diff --git a/src/lib/pyelliptic/openssl.py b/src/lib/pyelliptic/openssl.py new file mode 100644 index 00000000..86c7eab5 --- /dev/null +++ b/src/lib/pyelliptic/openssl.py @@ -0,0 +1,454 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (C) 2011 Yann GUIBET +# See LICENSE for details. 
+# +# Software slightly changed by Jonathan Warren + +import sys +import ctypes + +OpenSSL = None + + +class CipherName: + def __init__(self, name, pointer, blocksize): + self._name = name + self._pointer = pointer + self._blocksize = blocksize + + def __str__(self): + return "Cipher : " + self._name + " | Blocksize : " + str(self._blocksize) + " | Fonction pointer : " + str(self._pointer) + + def get_pointer(self): + return self._pointer() + + def get_name(self): + return self._name + + def get_blocksize(self): + return self._blocksize + + +class _OpenSSL: + """ + Wrapper for OpenSSL using ctypes + """ + def __init__(self, library): + """ + Build the wrapper + """ + self._lib = ctypes.CDLL(library) + + self.pointer = ctypes.pointer + self.c_int = ctypes.c_int + self.byref = ctypes.byref + self.create_string_buffer = ctypes.create_string_buffer + + self.BN_new = self._lib.BN_new + self.BN_new.restype = ctypes.c_void_p + self.BN_new.argtypes = [] + + self.BN_free = self._lib.BN_free + self.BN_free.restype = None + self.BN_free.argtypes = [ctypes.c_void_p] + + self.BN_num_bits = self._lib.BN_num_bits + self.BN_num_bits.restype = ctypes.c_int + self.BN_num_bits.argtypes = [ctypes.c_void_p] + + self.BN_bn2bin = self._lib.BN_bn2bin + self.BN_bn2bin.restype = ctypes.c_int + self.BN_bn2bin.argtypes = [ctypes.c_void_p, ctypes.c_void_p] + + self.BN_bin2bn = self._lib.BN_bin2bn + self.BN_bin2bn.restype = ctypes.c_void_p + self.BN_bin2bn.argtypes = [ctypes.c_void_p, ctypes.c_int, + ctypes.c_void_p] + + self.EC_KEY_free = self._lib.EC_KEY_free + self.EC_KEY_free.restype = None + self.EC_KEY_free.argtypes = [ctypes.c_void_p] + + self.EC_KEY_new_by_curve_name = self._lib.EC_KEY_new_by_curve_name + self.EC_KEY_new_by_curve_name.restype = ctypes.c_void_p + self.EC_KEY_new_by_curve_name.argtypes = [ctypes.c_int] + + self.EC_KEY_generate_key = self._lib.EC_KEY_generate_key + self.EC_KEY_generate_key.restype = ctypes.c_int + self.EC_KEY_generate_key.argtypes = [ctypes.c_void_p] + + 
self.EC_KEY_check_key = self._lib.EC_KEY_check_key + self.EC_KEY_check_key.restype = ctypes.c_int + self.EC_KEY_check_key.argtypes = [ctypes.c_void_p] + + self.EC_KEY_get0_private_key = self._lib.EC_KEY_get0_private_key + self.EC_KEY_get0_private_key.restype = ctypes.c_void_p + self.EC_KEY_get0_private_key.argtypes = [ctypes.c_void_p] + + self.EC_KEY_get0_public_key = self._lib.EC_KEY_get0_public_key + self.EC_KEY_get0_public_key.restype = ctypes.c_void_p + self.EC_KEY_get0_public_key.argtypes = [ctypes.c_void_p] + + self.EC_KEY_get0_group = self._lib.EC_KEY_get0_group + self.EC_KEY_get0_group.restype = ctypes.c_void_p + self.EC_KEY_get0_group.argtypes = [ctypes.c_void_p] + + self.EC_POINT_get_affine_coordinates_GFp = self._lib.EC_POINT_get_affine_coordinates_GFp + self.EC_POINT_get_affine_coordinates_GFp.restype = ctypes.c_int + self.EC_POINT_get_affine_coordinates_GFp.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p] + + self.EC_KEY_set_private_key = self._lib.EC_KEY_set_private_key + self.EC_KEY_set_private_key.restype = ctypes.c_int + self.EC_KEY_set_private_key.argtypes = [ctypes.c_void_p, + ctypes.c_void_p] + + self.EC_KEY_set_public_key = self._lib.EC_KEY_set_public_key + self.EC_KEY_set_public_key.restype = ctypes.c_int + self.EC_KEY_set_public_key.argtypes = [ctypes.c_void_p, + ctypes.c_void_p] + + self.EC_KEY_set_group = self._lib.EC_KEY_set_group + self.EC_KEY_set_group.restype = ctypes.c_int + self.EC_KEY_set_group.argtypes = [ctypes.c_void_p, ctypes.c_void_p] + + self.EC_POINT_set_affine_coordinates_GFp = self._lib.EC_POINT_set_affine_coordinates_GFp + self.EC_POINT_set_affine_coordinates_GFp.restype = ctypes.c_int + self.EC_POINT_set_affine_coordinates_GFp.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p] + + self.EC_POINT_new = self._lib.EC_POINT_new + self.EC_POINT_new.restype = ctypes.c_void_p + self.EC_POINT_new.argtypes = [ctypes.c_void_p] + + 
self.EC_POINT_free = self._lib.EC_POINT_free + self.EC_POINT_free.restype = None + self.EC_POINT_free.argtypes = [ctypes.c_void_p] + + self.BN_CTX_free = self._lib.BN_CTX_free + self.BN_CTX_free.restype = None + self.BN_CTX_free.argtypes = [ctypes.c_void_p] + + self.EC_POINT_mul = self._lib.EC_POINT_mul + self.EC_POINT_mul.restype = None + self.EC_POINT_mul.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p] + + self.EC_KEY_set_private_key = self._lib.EC_KEY_set_private_key + self.EC_KEY_set_private_key.restype = ctypes.c_int + self.EC_KEY_set_private_key.argtypes = [ctypes.c_void_p, + ctypes.c_void_p] + + self.ECDH_OpenSSL = self._lib.ECDH_OpenSSL + self._lib.ECDH_OpenSSL.restype = ctypes.c_void_p + self._lib.ECDH_OpenSSL.argtypes = [] + + self.BN_CTX_new = self._lib.BN_CTX_new + self._lib.BN_CTX_new.restype = ctypes.c_void_p + self._lib.BN_CTX_new.argtypes = [] + + self.ECDH_set_method = self._lib.ECDH_set_method + self._lib.ECDH_set_method.restype = ctypes.c_int + self._lib.ECDH_set_method.argtypes = [ctypes.c_void_p, ctypes.c_void_p] + + self.ECDH_compute_key = self._lib.ECDH_compute_key + self.ECDH_compute_key.restype = ctypes.c_int + self.ECDH_compute_key.argtypes = [ctypes.c_void_p, + ctypes.c_int, ctypes.c_void_p, ctypes.c_void_p] + + self.EVP_CipherInit_ex = self._lib.EVP_CipherInit_ex + self.EVP_CipherInit_ex.restype = ctypes.c_int + self.EVP_CipherInit_ex.argtypes = [ctypes.c_void_p, + ctypes.c_void_p, ctypes.c_void_p] + + self.EVP_CIPHER_CTX_new = self._lib.EVP_CIPHER_CTX_new + self.EVP_CIPHER_CTX_new.restype = ctypes.c_void_p + self.EVP_CIPHER_CTX_new.argtypes = [] + + # Cipher + self.EVP_aes_128_cfb128 = self._lib.EVP_aes_128_cfb128 + self.EVP_aes_128_cfb128.restype = ctypes.c_void_p + self.EVP_aes_128_cfb128.argtypes = [] + + self.EVP_aes_256_cfb128 = self._lib.EVP_aes_256_cfb128 + self.EVP_aes_256_cfb128.restype = ctypes.c_void_p + self.EVP_aes_256_cfb128.argtypes = [] + + self.EVP_aes_128_cbc = 
self._lib.EVP_aes_128_cbc + self.EVP_aes_128_cbc.restype = ctypes.c_void_p + self.EVP_aes_128_cbc.argtypes = [] + + self.EVP_aes_256_cbc = self._lib.EVP_aes_256_cbc + self.EVP_aes_256_cbc.restype = ctypes.c_void_p + self.EVP_aes_256_cbc.argtypes = [] + + #self.EVP_aes_128_ctr = self._lib.EVP_aes_128_ctr + #self.EVP_aes_128_ctr.restype = ctypes.c_void_p + #self.EVP_aes_128_ctr.argtypes = [] + + #self.EVP_aes_256_ctr = self._lib.EVP_aes_256_ctr + #self.EVP_aes_256_ctr.restype = ctypes.c_void_p + #self.EVP_aes_256_ctr.argtypes = [] + + self.EVP_aes_128_ofb = self._lib.EVP_aes_128_ofb + self.EVP_aes_128_ofb.restype = ctypes.c_void_p + self.EVP_aes_128_ofb.argtypes = [] + + self.EVP_aes_256_ofb = self._lib.EVP_aes_256_ofb + self.EVP_aes_256_ofb.restype = ctypes.c_void_p + self.EVP_aes_256_ofb.argtypes = [] + + self.EVP_bf_cbc = self._lib.EVP_bf_cbc + self.EVP_bf_cbc.restype = ctypes.c_void_p + self.EVP_bf_cbc.argtypes = [] + + self.EVP_bf_cfb64 = self._lib.EVP_bf_cfb64 + self.EVP_bf_cfb64.restype = ctypes.c_void_p + self.EVP_bf_cfb64.argtypes = [] + + self.EVP_rc4 = self._lib.EVP_rc4 + self.EVP_rc4.restype = ctypes.c_void_p + self.EVP_rc4.argtypes = [] + + self.EVP_CIPHER_CTX_cleanup = self._lib.EVP_CIPHER_CTX_cleanup + self.EVP_CIPHER_CTX_cleanup.restype = ctypes.c_int + self.EVP_CIPHER_CTX_cleanup.argtypes = [ctypes.c_void_p] + + self.EVP_CIPHER_CTX_free = self._lib.EVP_CIPHER_CTX_free + self.EVP_CIPHER_CTX_free.restype = None + self.EVP_CIPHER_CTX_free.argtypes = [ctypes.c_void_p] + + self.EVP_CipherUpdate = self._lib.EVP_CipherUpdate + self.EVP_CipherUpdate.restype = ctypes.c_int + self.EVP_CipherUpdate.argtypes = [ctypes.c_void_p, + ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int] + + self.EVP_CipherFinal_ex = self._lib.EVP_CipherFinal_ex + self.EVP_CipherFinal_ex.restype = ctypes.c_int + self.EVP_CipherFinal_ex.argtypes = [ctypes.c_void_p, + ctypes.c_void_p, ctypes.c_void_p] + + self.EVP_DigestInit = self._lib.EVP_DigestInit + 
self.EVP_DigestInit.restype = ctypes.c_int + self._lib.EVP_DigestInit.argtypes = [ctypes.c_void_p, ctypes.c_void_p] + + self.EVP_DigestInit_ex = self._lib.EVP_DigestInit_ex + self.EVP_DigestInit_ex.restype = ctypes.c_int + self._lib.EVP_DigestInit_ex.argtypes = 3 * [ctypes.c_void_p] + + self.EVP_DigestUpdate = self._lib.EVP_DigestUpdate + self.EVP_DigestUpdate.restype = ctypes.c_int + self.EVP_DigestUpdate.argtypes = [ctypes.c_void_p, + ctypes.c_void_p, ctypes.c_int] + + self.EVP_DigestFinal = self._lib.EVP_DigestFinal + self.EVP_DigestFinal.restype = ctypes.c_int + self.EVP_DigestFinal.argtypes = [ctypes.c_void_p, + ctypes.c_void_p, ctypes.c_void_p] + + self.EVP_DigestFinal_ex = self._lib.EVP_DigestFinal_ex + self.EVP_DigestFinal_ex.restype = ctypes.c_int + self.EVP_DigestFinal_ex.argtypes = [ctypes.c_void_p, + ctypes.c_void_p, ctypes.c_void_p] + + self.EVP_ecdsa = self._lib.EVP_ecdsa + self._lib.EVP_ecdsa.restype = ctypes.c_void_p + self._lib.EVP_ecdsa.argtypes = [] + + self.ECDSA_sign = self._lib.ECDSA_sign + self.ECDSA_sign.restype = ctypes.c_int + self.ECDSA_sign.argtypes = [ctypes.c_int, ctypes.c_void_p, + ctypes.c_int, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p] + + self.ECDSA_verify = self._lib.ECDSA_verify + self.ECDSA_verify.restype = ctypes.c_int + self.ECDSA_verify.argtypes = [ctypes.c_int, ctypes.c_void_p, + ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p] + + self.EVP_MD_CTX_create = self._lib.EVP_MD_CTX_create + self.EVP_MD_CTX_create.restype = ctypes.c_void_p + self.EVP_MD_CTX_create.argtypes = [] + + self.EVP_MD_CTX_init = self._lib.EVP_MD_CTX_init + self.EVP_MD_CTX_init.restype = None + self.EVP_MD_CTX_init.argtypes = [ctypes.c_void_p] + + self.EVP_MD_CTX_destroy = self._lib.EVP_MD_CTX_destroy + self.EVP_MD_CTX_destroy.restype = None + self.EVP_MD_CTX_destroy.argtypes = [ctypes.c_void_p] + + self.RAND_bytes = self._lib.RAND_bytes + self.RAND_bytes.restype = ctypes.c_int + self.RAND_bytes.argtypes = [ctypes.c_void_p, 
ctypes.c_int] + + + self.EVP_sha256 = self._lib.EVP_sha256 + self.EVP_sha256.restype = ctypes.c_void_p + self.EVP_sha256.argtypes = [] + + self.i2o_ECPublicKey = self._lib.i2o_ECPublicKey + self.i2o_ECPublicKey.restype = ctypes.c_void_p + self.i2o_ECPublicKey.argtypes = [ctypes.c_void_p, ctypes.c_void_p] + + self.EVP_sha512 = self._lib.EVP_sha512 + self.EVP_sha512.restype = ctypes.c_void_p + self.EVP_sha512.argtypes = [] + + self.HMAC = self._lib.HMAC + self.HMAC.restype = ctypes.c_void_p + self.HMAC.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int, + ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.c_void_p] + + try: + self.PKCS5_PBKDF2_HMAC = self._lib.PKCS5_PBKDF2_HMAC + except: + # The above is not compatible with all versions of OSX. + self.PKCS5_PBKDF2_HMAC = self._lib.PKCS5_PBKDF2_HMAC_SHA1 + + self.PKCS5_PBKDF2_HMAC.restype = ctypes.c_int + self.PKCS5_PBKDF2_HMAC.argtypes = [ctypes.c_void_p, ctypes.c_int, + ctypes.c_void_p, ctypes.c_int, + ctypes.c_int, ctypes.c_void_p, + ctypes.c_int, ctypes.c_void_p] + + self._set_ciphers() + self._set_curves() + + def _set_ciphers(self): + self.cipher_algo = { + 'aes-128-cbc': CipherName('aes-128-cbc', self.EVP_aes_128_cbc, 16), + 'aes-256-cbc': CipherName('aes-256-cbc', self.EVP_aes_256_cbc, 16), + 'aes-128-cfb': CipherName('aes-128-cfb', self.EVP_aes_128_cfb128, 16), + 'aes-256-cfb': CipherName('aes-256-cfb', self.EVP_aes_256_cfb128, 16), + 'aes-128-ofb': CipherName('aes-128-ofb', self._lib.EVP_aes_128_ofb, 16), + 'aes-256-ofb': CipherName('aes-256-ofb', self._lib.EVP_aes_256_ofb, 16), + #'aes-128-ctr': CipherName('aes-128-ctr', self._lib.EVP_aes_128_ctr, 16), + #'aes-256-ctr': CipherName('aes-256-ctr', self._lib.EVP_aes_256_ctr, 16), + 'bf-cfb': CipherName('bf-cfb', self.EVP_bf_cfb64, 8), + 'bf-cbc': CipherName('bf-cbc', self.EVP_bf_cbc, 8), + 'rc4': CipherName('rc4', self.EVP_rc4, 128), # 128 is the initialisation size not block size + } + + def _set_curves(self): + self.curves = { + 'secp112r1': 704, 
+ 'secp112r2': 705, + 'secp128r1': 706, + 'secp128r2': 707, + 'secp160k1': 708, + 'secp160r1': 709, + 'secp160r2': 710, + 'secp192k1': 711, + 'secp224k1': 712, + 'secp224r1': 713, + 'secp256k1': 714, + 'secp384r1': 715, + 'secp521r1': 716, + 'sect113r1': 717, + 'sect113r2': 718, + 'sect131r1': 719, + 'sect131r2': 720, + 'sect163k1': 721, + 'sect163r1': 722, + 'sect163r2': 723, + 'sect193r1': 724, + 'sect193r2': 725, + 'sect233k1': 726, + 'sect233r1': 727, + 'sect239k1': 728, + 'sect283k1': 729, + 'sect283r1': 730, + 'sect409k1': 731, + 'sect409r1': 732, + 'sect571k1': 733, + 'sect571r1': 734, + } + + def BN_num_bytes(self, x): + """ + returns the length of a BN (OpenSSl API) + """ + return int((self.BN_num_bits(x) + 7) / 8) + + def get_cipher(self, name): + """ + returns the OpenSSL cipher instance + """ + if name not in self.cipher_algo: + raise Exception("Unknown cipher") + return self.cipher_algo[name] + + def get_curve(self, name): + """ + returns the id of a elliptic curve + """ + if name not in self.curves: + raise Exception("Unknown curve") + return self.curves[name] + + def get_curve_by_id(self, id): + """ + returns the name of a elliptic curve with his id + """ + res = None + for i in self.curves: + if self.curves[i] == id: + res = i + break + if res is None: + raise Exception("Unknown curve") + return res + + def rand(self, size): + """ + OpenSSL random function + """ + buffer = self.malloc(0, size) + # This pyelliptic library, by default, didn't check the return value of RAND_bytes. It is + # evidently possible that it returned an error and not-actually-random data. However, in + # tests on various operating systems, while generating hundreds of gigabytes of random + # strings of various sizes I could not get an error to occur. Also Bitcoin doesn't check + # the return value of RAND_bytes either. 
+ # Fixed in Bitmessage version 0.4.2 (in source code on 2013-10-13) + while self.RAND_bytes(buffer, size) != 1: + import time + time.sleep(1) + return buffer.raw + + def malloc(self, data, size): + """ + returns a create_string_buffer (ctypes) + """ + buffer = None + if data != 0: + if sys.version_info.major == 3 and isinstance(data, type('')): + data = data.encode() + buffer = self.create_string_buffer(data, size) + else: + buffer = self.create_string_buffer(size) + return buffer + + +def openLibrary(): + global OpenSSL + try: + if sys.platform.startswith("win"): + dll_path = "src/lib/opensslVerify/libeay32.dll" + elif sys.platform == "cygwin": + dll_path = "/bin/cygcrypto-1.0.0.dll" + else: + dll_path = "/usr/local/ssl/lib/libcrypto.so" + ssl = _OpenSSL(dll_path) + assert ssl + except Exception, err: + ssl = _OpenSSL(ctypes.util.find_library('ssl') or ctypes.util.find_library('crypto') or ctypes.util.find_library('libcrypto') or 'libeay32') + OpenSSL = ssl + + +def closeLibrary(): + import _ctypes + if "FreeLibrary" in dir(_ctypes): + _ctypes.FreeLibrary(OpenSSL._lib._handle) + else: + _ctypes.dlclose(OpenSSL._lib._handle) + +openLibrary() diff --git a/src/lib/sslcrypto/LICENSE b/src/lib/sslcrypto/LICENSE deleted file mode 100644 index 2feefc45..00000000 --- a/src/lib/sslcrypto/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -MIT License - -Copyright (c) 2019 Ivan Machugovskiy - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - - -Additionally, the following licenses must be preserved: - -- ripemd implementation is licensed under BSD-3 by Markus Friedl, see `_ripemd.py`; -- jacobian curve implementation is dual-licensed under MIT or public domain license, see `_jacobian.py`. diff --git a/src/lib/sslcrypto/__init__.py b/src/lib/sslcrypto/__init__.py deleted file mode 100644 index 77f9b3f3..00000000 --- a/src/lib/sslcrypto/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -__all__ = ["aes", "ecc", "rsa"] - -try: - from .openssl import aes, ecc, rsa -except OSError: - from .fallback import aes, ecc, rsa diff --git a/src/lib/sslcrypto/_aes.py b/src/lib/sslcrypto/_aes.py deleted file mode 100644 index 4f8d4ec2..00000000 --- a/src/lib/sslcrypto/_aes.py +++ /dev/null @@ -1,53 +0,0 @@ -# pylint: disable=import-outside-toplevel - -class AES: - def __init__(self, backend, fallback=None): - self._backend = backend - self._fallback = fallback - - - def get_algo_key_length(self, algo): - if algo.count("-") != 2: - raise ValueError("Invalid algorithm name") - try: - return int(algo.split("-")[1]) // 8 - except ValueError: - raise ValueError("Invalid algorithm name") from None - - - def new_key(self, algo="aes-256-cbc"): - if not self._backend.is_algo_supported(algo): - if self._fallback is None: - raise ValueError("This algorithm is not supported") - return self._fallback.new_key(algo) - return self._backend.random(self.get_algo_key_length(algo)) - - - def encrypt(self, data, key, algo="aes-256-cbc"): - if not 
self._backend.is_algo_supported(algo): - if self._fallback is None: - raise ValueError("This algorithm is not supported") - return self._fallback.encrypt(data, key, algo) - - key_length = self.get_algo_key_length(algo) - if len(key) != key_length: - raise ValueError("Expected key to be {} bytes, got {} bytes".format(key_length, len(key))) - - return self._backend.encrypt(data, key, algo) - - - def decrypt(self, ciphertext, iv, key, algo="aes-256-cbc"): - if not self._backend.is_algo_supported(algo): - if self._fallback is None: - raise ValueError("This algorithm is not supported") - return self._fallback.decrypt(ciphertext, iv, key, algo) - - key_length = self.get_algo_key_length(algo) - if len(key) != key_length: - raise ValueError("Expected key to be {} bytes, got {} bytes".format(key_length, len(key))) - - return self._backend.decrypt(ciphertext, iv, key, algo) - - - def get_backend(self): - return self._backend.get_backend() diff --git a/src/lib/sslcrypto/_ecc.py b/src/lib/sslcrypto/_ecc.py deleted file mode 100644 index 88e04576..00000000 --- a/src/lib/sslcrypto/_ecc.py +++ /dev/null @@ -1,506 +0,0 @@ -import hashlib -import struct -import hmac -import base58 - - -try: - hashlib.new("ripemd160") -except ValueError: - # No native implementation - from . 
import _ripemd - def ripemd160(*args): - return _ripemd.new(*args) -else: - # Use OpenSSL - def ripemd160(*args): - return hashlib.new("ripemd160", *args) - - -class ECC: - # pylint: disable=line-too-long - # name: (nid, p, n, a, b, (Gx, Gy)), - CURVES = { - "secp112r1": ( - 704, - 0xDB7C2ABF62E35E668076BEAD208B, - 0xDB7C2ABF62E35E7628DFAC6561C5, - 0xDB7C2ABF62E35E668076BEAD2088, - 0x659EF8BA043916EEDE8911702B22, - ( - 0x09487239995A5EE76B55F9C2F098, - 0xA89CE5AF8724C0A23E0E0FF77500 - ) - ), - "secp112r2": ( - 705, - 0xDB7C2ABF62E35E668076BEAD208B, - 0x36DF0AAFD8B8D7597CA10520D04B, - 0x6127C24C05F38A0AAAF65C0EF02C, - 0x51DEF1815DB5ED74FCC34C85D709, - ( - 0x4BA30AB5E892B4E1649DD0928643, - 0xADCD46F5882E3747DEF36E956E97 - ) - ), - "secp128r1": ( - 706, - 0xFFFFFFFDFFFFFFFFFFFFFFFFFFFFFFFF, - 0xFFFFFFFE0000000075A30D1B9038A115, - 0xFFFFFFFDFFFFFFFFFFFFFFFFFFFFFFFC, - 0xE87579C11079F43DD824993C2CEE5ED3, - ( - 0x161FF7528B899B2D0C28607CA52C5B86, - 0xCF5AC8395BAFEB13C02DA292DDED7A83 - ) - ), - "secp128r2": ( - 707, - 0xFFFFFFFDFFFFFFFFFFFFFFFFFFFFFFFF, - 0x3FFFFFFF7FFFFFFFBE0024720613B5A3, - 0xD6031998D1B3BBFEBF59CC9BBFF9AEE1, - 0x5EEEFCA380D02919DC2C6558BB6D8A5D, - ( - 0x7B6AA5D85E572983E6FB32A7CDEBC140, - 0x27B6916A894D3AEE7106FE805FC34B44 - ) - ), - "secp160k1": ( - 708, - 0x00FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFAC73, - 0x0100000000000000000001B8FA16DFAB9ACA16B6B3, - 0, - 7, - ( - 0x3B4C382CE37AA192A4019E763036F4F5DD4D7EBB, - 0x938CF935318FDCED6BC28286531733C3F03C4FEE - ) - ), - "secp160r1": ( - 709, - 0x00FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF7FFFFFFF, - 0x0100000000000000000001F4C8F927AED3CA752257, - 0x00FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF7FFFFFFC, - 0x001C97BEFC54BD7A8B65ACF89F81D4D4ADC565FA45, - ( - 0x4A96B5688EF573284664698968C38BB913CBFC82, - 0x23A628553168947D59DCC912042351377AC5FB32 - ) - ), - "secp160r2": ( - 710, - 0x00FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFAC73, - 0x0100000000000000000000351EE786A818F3A1A16B, - 0x00FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFAC70, - 
0x00B4E134D3FB59EB8BAB57274904664D5AF50388BA, - ( - 0x52DCB034293A117E1F4FF11B30F7199D3144CE6D, - 0xFEAFFEF2E331F296E071FA0DF9982CFEA7D43F2E - ) - ), - "secp192k1": ( - 711, - 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFEE37, - 0xFFFFFFFFFFFFFFFFFFFFFFFE26F2FC170F69466A74DEFD8D, - 0, - 3, - ( - 0xDB4FF10EC057E9AE26B07D0280B7F4341DA5D1B1EAE06C7D, - 0x9B2F2F6D9C5628A7844163D015BE86344082AA88D95E2F9D - ) - ), - "prime192v1": ( - 409, - 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFFFFFFFFFF, - 0xFFFFFFFFFFFFFFFFFFFFFFFF99DEF836146BC9B1B4D22831, - 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFFFFFFFFFC, - 0x64210519E59C80E70FA7E9AB72243049FEB8DEECC146B9B1, - ( - 0x188DA80EB03090F67CBF20EB43A18800F4FF0AFD82FF1012, - 0x07192B95FFC8DA78631011ED6B24CDD573F977A11E794811 - ) - ), - "secp224k1": ( - 712, - 0x00FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFE56D, - 0x010000000000000000000000000001DCE8D2EC6184CAF0A971769FB1F7, - 0, - 5, - ( - 0xA1455B334DF099DF30FC28A169A467E9E47075A90F7E650EB6B7A45C, - 0x7E089FED7FBA344282CAFBD6F7E319F7C0B0BD59E2CA4BDB556D61A5 - ) - ), - "secp224r1": ( - 713, - 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF000000000000000000000001, - 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFF16A2E0B8F03E13DD29455C5C2A3D, - 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFFFFFFFFFFFFFFFFFE, - 0xB4050A850C04B3ABF54132565044B0B7D7BFD8BA270B39432355FFB4, - ( - 0xB70E0CBD6BB4BF7F321390B94A03C1D356C21122343280D6115C1D21, - 0xBD376388B5F723FB4C22DFE6CD4375A05A07476444D5819985007E34 - ) - ), - "secp256k1": ( - 714, - 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F, - 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141, - 0, - 7, - ( - 0x79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798, - 0x483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8 - ) - ), - "prime256v1": ( - 715, - 0xFFFFFFFF00000001000000000000000000000000FFFFFFFFFFFFFFFFFFFFFFFF, - 0xFFFFFFFF00000000FFFFFFFFFFFFFFFFBCE6FAADA7179E84F3B9CAC2FC632551, - 
0xFFFFFFFF00000001000000000000000000000000FFFFFFFFFFFFFFFFFFFFFFFC, - 0x5AC635D8AA3A93E7B3EBBD55769886BC651D06B0CC53B0F63BCE3C3E27D2604B, - ( - 0x6B17D1F2E12C4247F8BCE6E563A440F277037D812DEB33A0F4A13945D898C296, - 0x4FE342E2FE1A7F9B8EE7EB4A7C0F9E162BCE33576B315ECECBB6406837BF51F5 - ) - ), - "secp384r1": ( - 716, - 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFF0000000000000000FFFFFFFF, - 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFC7634D81F4372DDF581A0DB248B0A77AECEC196ACCC52973, - 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFF0000000000000000FFFFFFFC, - 0xB3312FA7E23EE7E4988E056BE3F82D19181D9C6EFE8141120314088F5013875AC656398D8A2ED19D2A85C8EDD3EC2AEF, - ( - 0xAA87CA22BE8B05378EB1C71EF320AD746E1D3B628BA79B9859F741E082542A385502F25DBF55296C3A545E3872760AB7, - 0x3617DE4A96262C6F5D9E98BF9292DC29F8F41DBD289A147CE9DA3113B5F0B8C00A60B1CE1D7E819D7A431D7C90EA0E5F - ) - ), - "secp521r1": ( - 717, - 0x01FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF, - 0x01FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFA51868783BF2F966B7FCC0148F709A5D03BB5C9B8899C47AEBB6FB71E91386409, - 0x01FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFC, - 0x0051953EB9618E1C9A1F929A21A0B68540EEA2DA725B99B315F3B8B489918EF109E156193951EC7E937B1652C0BD3BB1BF073573DF883D2C34F1EF451FD46B503F00, - ( - 0x00C6858E06B70404E9CD9E3ECB662395B4429C648139053FB521F828AF606B4D3DBAA14B5E77EFE75928FE1DC127A2FFA8DE3348B3C1856A429BF97E7E31C2E5BD66, - 0x011839296A789A3BC0045C8A5FB42C7D1BD998F54449579B446817AFBD17273E662C97EE72995EF42640C550B9013FAD0761353C7086A272C24088BE94769FD16650 - ) - ) - } - # pylint: enable=line-too-long - - def __init__(self, backend, aes): - self._backend = backend - self._aes = aes - - - def get_curve(self, name): - if name not in self.CURVES: - raise 
ValueError("Unknown curve {}".format(name)) - nid, p, n, a, b, g = self.CURVES[name] - return EllipticCurve(self._backend(p, n, a, b, g), self._aes, nid) - - - def get_backend(self): - return self._backend.get_backend() - - -class EllipticCurve: - def __init__(self, backend, aes, nid): - self._backend = backend - self._aes = aes - self.nid = nid - - - def _encode_public_key(self, x, y, is_compressed=True, raw=True): - if raw: - if is_compressed: - return bytes([0x02 + (y[-1] % 2)]) + x - else: - return bytes([0x04]) + x + y - else: - return struct.pack("!HH", self.nid, len(x)) + x + struct.pack("!H", len(y)) + y - - - def _decode_public_key(self, public_key, partial=False): - if not public_key: - raise ValueError("No public key") - - if public_key[0] == 0x04: - # Uncompressed - expected_length = 1 + 2 * self._backend.public_key_length - if partial: - if len(public_key) < expected_length: - raise ValueError("Invalid uncompressed public key length") - else: - if len(public_key) != expected_length: - raise ValueError("Invalid uncompressed public key length") - x = public_key[1:1 + self._backend.public_key_length] - y = public_key[1 + self._backend.public_key_length:expected_length] - if partial: - return (x, y), expected_length - else: - return x, y - elif public_key[0] in (0x02, 0x03): - # Compressed - expected_length = 1 + self._backend.public_key_length - if partial: - if len(public_key) < expected_length: - raise ValueError("Invalid compressed public key length") - else: - if len(public_key) != expected_length: - raise ValueError("Invalid compressed public key length") - - x, y = self._backend.decompress_point(public_key[:expected_length]) - # Sanity check - if x != public_key[1:expected_length]: - raise ValueError("Incorrect compressed public key") - if partial: - return (x, y), expected_length - else: - return x, y - else: - raise ValueError("Invalid public key prefix") - - - def _decode_public_key_openssl(self, public_key, partial=False): - if not public_key: - 
raise ValueError("No public key") - - i = 0 - - nid, = struct.unpack("!H", public_key[i:i + 2]) - i += 2 - if nid != self.nid: - raise ValueError("Wrong curve") - - xlen, = struct.unpack("!H", public_key[i:i + 2]) - i += 2 - if len(public_key) - i < xlen: - raise ValueError("Too short public key") - x = public_key[i:i + xlen] - i += xlen - - ylen, = struct.unpack("!H", public_key[i:i + 2]) - i += 2 - if len(public_key) - i < ylen: - raise ValueError("Too short public key") - y = public_key[i:i + ylen] - i += ylen - - if partial: - return (x, y), i - else: - if i < len(public_key): - raise ValueError("Too long public key") - return x, y - - - def new_private_key(self, is_compressed=False): - return self._backend.new_private_key() + (b"\x01" if is_compressed else b"") - - - def private_to_public(self, private_key): - if len(private_key) == self._backend.public_key_length: - is_compressed = False - elif len(private_key) == self._backend.public_key_length + 1 and private_key[-1] == 1: - is_compressed = True - private_key = private_key[:-1] - else: - raise ValueError("Private key has invalid length") - x, y = self._backend.private_to_public(private_key) - return self._encode_public_key(x, y, is_compressed=is_compressed) - - - def private_to_wif(self, private_key): - return base58.b58encode_check(b"\x80" + private_key) - - - def wif_to_private(self, wif): - dec = base58.b58decode_check(wif) - if dec[0] != 0x80: - raise ValueError("Invalid network (expected mainnet)") - return dec[1:] - - - def public_to_address(self, public_key): - h = hashlib.sha256(public_key).digest() - hash160 = ripemd160(h).digest() - return base58.b58encode_check(b"\x00" + hash160) - - - def private_to_address(self, private_key): - # Kinda useless but left for quick migration from pybitcointools - return self.public_to_address(self.private_to_public(private_key)) - - - def derive(self, private_key, public_key): - if len(private_key) == self._backend.public_key_length + 1 and private_key[-1] == 1: - 
private_key = private_key[:-1] - if len(private_key) != self._backend.public_key_length: - raise ValueError("Private key has invalid length") - if not isinstance(public_key, tuple): - public_key = self._decode_public_key(public_key) - return self._backend.ecdh(private_key, public_key) - - - def _digest(self, data, hash): - if hash is None: - return data - elif callable(hash): - return hash(data) - elif hash == "sha1": - return hashlib.sha1(data).digest() - elif hash == "sha256": - return hashlib.sha256(data).digest() - elif hash == "sha512": - return hashlib.sha512(data).digest() - else: - raise ValueError("Unknown hash/derivation method") - - - # High-level functions - def encrypt(self, data, public_key, algo="aes-256-cbc", derivation="sha256", mac="hmac-sha256", return_aes_key=False): - # Generate ephemeral private key - private_key = self.new_private_key() - - # Derive key - ecdh = self.derive(private_key, public_key) - key = self._digest(ecdh, derivation) - k_enc_len = self._aes.get_algo_key_length(algo) - if len(key) < k_enc_len: - raise ValueError("Too short digest") - k_enc, k_mac = key[:k_enc_len], key[k_enc_len:] - - # Encrypt - ciphertext, iv = self._aes.encrypt(data, k_enc, algo=algo) - ephem_public_key = self.private_to_public(private_key) - ephem_public_key = self._decode_public_key(ephem_public_key) - ephem_public_key = self._encode_public_key(*ephem_public_key, raw=False) - ciphertext = iv + ephem_public_key + ciphertext - - # Add MAC tag - if callable(mac): - tag = mac(k_mac, ciphertext) - elif mac == "hmac-sha256": - h = hmac.new(k_mac, digestmod="sha256") - h.update(ciphertext) - tag = h.digest() - elif mac == "hmac-sha512": - h = hmac.new(k_mac, digestmod="sha512") - h.update(ciphertext) - tag = h.digest() - elif mac is None: - tag = b"" - else: - raise ValueError("Unsupported MAC") - - if return_aes_key: - return ciphertext + tag, k_enc - else: - return ciphertext + tag - - - def decrypt(self, ciphertext, private_key, algo="aes-256-cbc", 
derivation="sha256", mac="hmac-sha256"): - # Get MAC tag - if callable(mac): - tag_length = mac.digest_size - elif mac == "hmac-sha256": - tag_length = hmac.new(b"", digestmod="sha256").digest_size - elif mac == "hmac-sha512": - tag_length = hmac.new(b"", digestmod="sha512").digest_size - elif mac is None: - tag_length = 0 - else: - raise ValueError("Unsupported MAC") - - if len(ciphertext) < tag_length: - raise ValueError("Ciphertext is too small to contain MAC tag") - if tag_length == 0: - tag = b"" - else: - ciphertext, tag = ciphertext[:-tag_length], ciphertext[-tag_length:] - - orig_ciphertext = ciphertext - - if len(ciphertext) < 16: - raise ValueError("Ciphertext is too small to contain IV") - iv, ciphertext = ciphertext[:16], ciphertext[16:] - - public_key, pos = self._decode_public_key_openssl(ciphertext, partial=True) - ciphertext = ciphertext[pos:] - - # Derive key - ecdh = self.derive(private_key, public_key) - key = self._digest(ecdh, derivation) - k_enc_len = self._aes.get_algo_key_length(algo) - if len(key) < k_enc_len: - raise ValueError("Too short digest") - k_enc, k_mac = key[:k_enc_len], key[k_enc_len:] - - # Verify MAC tag - if callable(mac): - expected_tag = mac(k_mac, orig_ciphertext) - elif mac == "hmac-sha256": - h = hmac.new(k_mac, digestmod="sha256") - h.update(orig_ciphertext) - expected_tag = h.digest() - elif mac == "hmac-sha512": - h = hmac.new(k_mac, digestmod="sha512") - h.update(orig_ciphertext) - expected_tag = h.digest() - elif mac is None: - expected_tag = b"" - - if not hmac.compare_digest(tag, expected_tag): - raise ValueError("Invalid MAC tag") - - return self._aes.decrypt(ciphertext, iv, k_enc, algo=algo) - - - def sign(self, data, private_key, hash="sha256", recoverable=False, entropy=None): - if len(private_key) == self._backend.public_key_length: - is_compressed = False - elif len(private_key) == self._backend.public_key_length + 1 and private_key[-1] == 1: - is_compressed = True - private_key = private_key[:-1] - else: - 
raise ValueError("Private key has invalid length") - - data = self._digest(data, hash) - if not entropy: - v = b"\x01" * len(data) - k = b"\x00" * len(data) - k = hmac.new(k, v + b"\x00" + private_key + data, "sha256").digest() - v = hmac.new(k, v, "sha256").digest() - k = hmac.new(k, v + b"\x01" + private_key + data, "sha256").digest() - v = hmac.new(k, v, "sha256").digest() - entropy = hmac.new(k, v, "sha256").digest() - return self._backend.sign(data, private_key, recoverable, is_compressed, entropy=entropy) - - - def recover(self, signature, data, hash="sha256"): - # Sanity check: is this signature recoverable? - if len(signature) != 1 + 2 * self._backend.public_key_length: - raise ValueError("Cannot recover an unrecoverable signature") - x, y = self._backend.recover(signature, self._digest(data, hash)) - is_compressed = signature[0] >= 31 - return self._encode_public_key(x, y, is_compressed=is_compressed) - - - def verify(self, signature, data, public_key, hash="sha256"): - if len(signature) == 1 + 2 * self._backend.public_key_length: - # Recoverable signature - signature = signature[1:] - if len(signature) != 2 * self._backend.public_key_length: - raise ValueError("Invalid signature format") - if not isinstance(public_key, tuple): - public_key = self._decode_public_key(public_key) - return self._backend.verify(signature, self._digest(data, hash), public_key) - - - def derive_child(self, seed, child): - # Based on BIP32 - if not 0 <= child < 2 ** 31: - raise ValueError("Invalid child index") - return self._backend.derive_child(seed, child) diff --git a/src/lib/sslcrypto/_ripemd.py b/src/lib/sslcrypto/_ripemd.py deleted file mode 100644 index 89377cc2..00000000 --- a/src/lib/sslcrypto/_ripemd.py +++ /dev/null @@ -1,375 +0,0 @@ -# Copyright (c) 2001 Markus Friedl. All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# 1. 
Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR -# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES -# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. -# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT -# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF -# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -# pylint: skip-file - -import sys - -digest_size = 20 -digestsize = 20 - -class RIPEMD160: - """ - Return a new RIPEMD160 object. An optional string argument - may be provided; if present, this string will be automatically - hashed. - """ - - def __init__(self, arg=None): - self.ctx = RMDContext() - if arg: - self.update(arg) - self.dig = None - - def update(self, arg): - RMD160Update(self.ctx, arg, len(arg)) - self.dig = None - - def digest(self): - if self.dig: - return self.dig - ctx = self.ctx.copy() - self.dig = RMD160Final(self.ctx) - self.ctx = ctx - return self.dig - - def hexdigest(self): - dig = self.digest() - hex_digest = "" - for d in dig: - hex_digest += "%02x" % d - return hex_digest - - def copy(self): - import copy - return copy.deepcopy(self) - - - -def new(arg=None): - """ - Return a new RIPEMD160 object. 
An optional string argument - may be provided; if present, this string will be automatically - hashed. - """ - return RIPEMD160(arg) - - - -# -# Private. -# - -class RMDContext: - def __init__(self): - self.state = [0x67452301, 0xEFCDAB89, 0x98BADCFE, - 0x10325476, 0xC3D2E1F0] # uint32 - self.count = 0 # uint64 - self.buffer = [0] * 64 # uchar - def copy(self): - ctx = RMDContext() - ctx.state = self.state[:] - ctx.count = self.count - ctx.buffer = self.buffer[:] - return ctx - -K0 = 0x00000000 -K1 = 0x5A827999 -K2 = 0x6ED9EBA1 -K3 = 0x8F1BBCDC -K4 = 0xA953FD4E - -KK0 = 0x50A28BE6 -KK1 = 0x5C4DD124 -KK2 = 0x6D703EF3 -KK3 = 0x7A6D76E9 -KK4 = 0x00000000 - -def ROL(n, x): - return ((x << n) & 0xffffffff) | (x >> (32 - n)) - -def F0(x, y, z): - return x ^ y ^ z - -def F1(x, y, z): - return (x & y) | (((~x) % 0x100000000) & z) - -def F2(x, y, z): - return (x | ((~y) % 0x100000000)) ^ z - -def F3(x, y, z): - return (x & z) | (((~z) % 0x100000000) & y) - -def F4(x, y, z): - return x ^ (y | ((~z) % 0x100000000)) - -def R(a, b, c, d, e, Fj, Kj, sj, rj, X): - a = ROL(sj, (a + Fj(b, c, d) + X[rj] + Kj) % 0x100000000) + e - c = ROL(10, c) - return a % 0x100000000, c - -PADDING = [0x80] + [0] * 63 - -import sys -import struct - -def RMD160Transform(state, block): # uint32 state[5], uchar block[64] - x = [0] * 16 - if sys.byteorder == "little": - x = struct.unpack("<16L", bytes(block[0:64])) - else: - raise ValueError("Big-endian platforms are not supported") - a = state[0] - b = state[1] - c = state[2] - d = state[3] - e = state[4] - - # Round 1 - a, c = R(a, b, c, d, e, F0, K0, 11, 0, x) - e, b = R(e, a, b, c, d, F0, K0, 14, 1, x) - d, a = R(d, e, a, b, c, F0, K0, 15, 2, x) - c, e = R(c, d, e, a, b, F0, K0, 12, 3, x) - b, d = R(b, c, d, e, a, F0, K0, 5, 4, x) - a, c = R(a, b, c, d, e, F0, K0, 8, 5, x) - e, b = R(e, a, b, c, d, F0, K0, 7, 6, x) - d, a = R(d, e, a, b, c, F0, K0, 9, 7, x) - c, e = R(c, d, e, a, b, F0, K0, 11, 8, x) - b, d = R(b, c, d, e, a, F0, K0, 13, 9, x) - a, 
c = R(a, b, c, d, e, F0, K0, 14, 10, x) - e, b = R(e, a, b, c, d, F0, K0, 15, 11, x) - d, a = R(d, e, a, b, c, F0, K0, 6, 12, x) - c, e = R(c, d, e, a, b, F0, K0, 7, 13, x) - b, d = R(b, c, d, e, a, F0, K0, 9, 14, x) - a, c = R(a, b, c, d, e, F0, K0, 8, 15, x) # #15 - # Round 2 - e, b = R(e, a, b, c, d, F1, K1, 7, 7, x) - d, a = R(d, e, a, b, c, F1, K1, 6, 4, x) - c, e = R(c, d, e, a, b, F1, K1, 8, 13, x) - b, d = R(b, c, d, e, a, F1, K1, 13, 1, x) - a, c = R(a, b, c, d, e, F1, K1, 11, 10, x) - e, b = R(e, a, b, c, d, F1, K1, 9, 6, x) - d, a = R(d, e, a, b, c, F1, K1, 7, 15, x) - c, e = R(c, d, e, a, b, F1, K1, 15, 3, x) - b, d = R(b, c, d, e, a, F1, K1, 7, 12, x) - a, c = R(a, b, c, d, e, F1, K1, 12, 0, x) - e, b = R(e, a, b, c, d, F1, K1, 15, 9, x) - d, a = R(d, e, a, b, c, F1, K1, 9, 5, x) - c, e = R(c, d, e, a, b, F1, K1, 11, 2, x) - b, d = R(b, c, d, e, a, F1, K1, 7, 14, x) - a, c = R(a, b, c, d, e, F1, K1, 13, 11, x) - e, b = R(e, a, b, c, d, F1, K1, 12, 8, x) # #31 - # Round 3 - d, a = R(d, e, a, b, c, F2, K2, 11, 3, x) - c, e = R(c, d, e, a, b, F2, K2, 13, 10, x) - b, d = R(b, c, d, e, a, F2, K2, 6, 14, x) - a, c = R(a, b, c, d, e, F2, K2, 7, 4, x) - e, b = R(e, a, b, c, d, F2, K2, 14, 9, x) - d, a = R(d, e, a, b, c, F2, K2, 9, 15, x) - c, e = R(c, d, e, a, b, F2, K2, 13, 8, x) - b, d = R(b, c, d, e, a, F2, K2, 15, 1, x) - a, c = R(a, b, c, d, e, F2, K2, 14, 2, x) - e, b = R(e, a, b, c, d, F2, K2, 8, 7, x) - d, a = R(d, e, a, b, c, F2, K2, 13, 0, x) - c, e = R(c, d, e, a, b, F2, K2, 6, 6, x) - b, d = R(b, c, d, e, a, F2, K2, 5, 13, x) - a, c = R(a, b, c, d, e, F2, K2, 12, 11, x) - e, b = R(e, a, b, c, d, F2, K2, 7, 5, x) - d, a = R(d, e, a, b, c, F2, K2, 5, 12, x) # #47 - # Round 4 - c, e = R(c, d, e, a, b, F3, K3, 11, 1, x) - b, d = R(b, c, d, e, a, F3, K3, 12, 9, x) - a, c = R(a, b, c, d, e, F3, K3, 14, 11, x) - e, b = R(e, a, b, c, d, F3, K3, 15, 10, x) - d, a = R(d, e, a, b, c, F3, K3, 14, 0, x) - c, e = R(c, d, e, a, b, F3, K3, 15, 8, x) - b, d = R(b, 
c, d, e, a, F3, K3, 9, 12, x) - a, c = R(a, b, c, d, e, F3, K3, 8, 4, x) - e, b = R(e, a, b, c, d, F3, K3, 9, 13, x) - d, a = R(d, e, a, b, c, F3, K3, 14, 3, x) - c, e = R(c, d, e, a, b, F3, K3, 5, 7, x) - b, d = R(b, c, d, e, a, F3, K3, 6, 15, x) - a, c = R(a, b, c, d, e, F3, K3, 8, 14, x) - e, b = R(e, a, b, c, d, F3, K3, 6, 5, x) - d, a = R(d, e, a, b, c, F3, K3, 5, 6, x) - c, e = R(c, d, e, a, b, F3, K3, 12, 2, x) # #63 - # Round 5 - b, d = R(b, c, d, e, a, F4, K4, 9, 4, x) - a, c = R(a, b, c, d, e, F4, K4, 15, 0, x) - e, b = R(e, a, b, c, d, F4, K4, 5, 5, x) - d, a = R(d, e, a, b, c, F4, K4, 11, 9, x) - c, e = R(c, d, e, a, b, F4, K4, 6, 7, x) - b, d = R(b, c, d, e, a, F4, K4, 8, 12, x) - a, c = R(a, b, c, d, e, F4, K4, 13, 2, x) - e, b = R(e, a, b, c, d, F4, K4, 12, 10, x) - d, a = R(d, e, a, b, c, F4, K4, 5, 14, x) - c, e = R(c, d, e, a, b, F4, K4, 12, 1, x) - b, d = R(b, c, d, e, a, F4, K4, 13, 3, x) - a, c = R(a, b, c, d, e, F4, K4, 14, 8, x) - e, b = R(e, a, b, c, d, F4, K4, 11, 11, x) - d, a = R(d, e, a, b, c, F4, K4, 8, 6, x) - c, e = R(c, d, e, a, b, F4, K4, 5, 15, x) - b, d = R(b, c, d, e, a, F4, K4, 6, 13, x) # #79 - - aa = a - bb = b - cc = c - dd = d - ee = e - - a = state[0] - b = state[1] - c = state[2] - d = state[3] - e = state[4] - - # Parallel round 1 - a, c = R(a, b, c, d, e, F4, KK0, 8, 5, x) - e, b = R(e, a, b, c, d, F4, KK0, 9, 14, x) - d, a = R(d, e, a, b, c, F4, KK0, 9, 7, x) - c, e = R(c, d, e, a, b, F4, KK0, 11, 0, x) - b, d = R(b, c, d, e, a, F4, KK0, 13, 9, x) - a, c = R(a, b, c, d, e, F4, KK0, 15, 2, x) - e, b = R(e, a, b, c, d, F4, KK0, 15, 11, x) - d, a = R(d, e, a, b, c, F4, KK0, 5, 4, x) - c, e = R(c, d, e, a, b, F4, KK0, 7, 13, x) - b, d = R(b, c, d, e, a, F4, KK0, 7, 6, x) - a, c = R(a, b, c, d, e, F4, KK0, 8, 15, x) - e, b = R(e, a, b, c, d, F4, KK0, 11, 8, x) - d, a = R(d, e, a, b, c, F4, KK0, 14, 1, x) - c, e = R(c, d, e, a, b, F4, KK0, 14, 10, x) - b, d = R(b, c, d, e, a, F4, KK0, 12, 3, x) - a, c = R(a, b, c, d, e, F4, 
KK0, 6, 12, x) # #15 - # Parallel round 2 - e, b = R(e, a, b, c, d, F3, KK1, 9, 6, x) - d, a = R(d, e, a, b, c, F3, KK1, 13, 11, x) - c, e = R(c, d, e, a, b, F3, KK1, 15, 3, x) - b, d = R(b, c, d, e, a, F3, KK1, 7, 7, x) - a, c = R(a, b, c, d, e, F3, KK1, 12, 0, x) - e, b = R(e, a, b, c, d, F3, KK1, 8, 13, x) - d, a = R(d, e, a, b, c, F3, KK1, 9, 5, x) - c, e = R(c, d, e, a, b, F3, KK1, 11, 10, x) - b, d = R(b, c, d, e, a, F3, KK1, 7, 14, x) - a, c = R(a, b, c, d, e, F3, KK1, 7, 15, x) - e, b = R(e, a, b, c, d, F3, KK1, 12, 8, x) - d, a = R(d, e, a, b, c, F3, KK1, 7, 12, x) - c, e = R(c, d, e, a, b, F3, KK1, 6, 4, x) - b, d = R(b, c, d, e, a, F3, KK1, 15, 9, x) - a, c = R(a, b, c, d, e, F3, KK1, 13, 1, x) - e, b = R(e, a, b, c, d, F3, KK1, 11, 2, x) # #31 - # Parallel round 3 - d, a = R(d, e, a, b, c, F2, KK2, 9, 15, x) - c, e = R(c, d, e, a, b, F2, KK2, 7, 5, x) - b, d = R(b, c, d, e, a, F2, KK2, 15, 1, x) - a, c = R(a, b, c, d, e, F2, KK2, 11, 3, x) - e, b = R(e, a, b, c, d, F2, KK2, 8, 7, x) - d, a = R(d, e, a, b, c, F2, KK2, 6, 14, x) - c, e = R(c, d, e, a, b, F2, KK2, 6, 6, x) - b, d = R(b, c, d, e, a, F2, KK2, 14, 9, x) - a, c = R(a, b, c, d, e, F2, KK2, 12, 11, x) - e, b = R(e, a, b, c, d, F2, KK2, 13, 8, x) - d, a = R(d, e, a, b, c, F2, KK2, 5, 12, x) - c, e = R(c, d, e, a, b, F2, KK2, 14, 2, x) - b, d = R(b, c, d, e, a, F2, KK2, 13, 10, x) - a, c = R(a, b, c, d, e, F2, KK2, 13, 0, x) - e, b = R(e, a, b, c, d, F2, KK2, 7, 4, x) - d, a = R(d, e, a, b, c, F2, KK2, 5, 13, x) # #47 - # Parallel round 4 - c, e = R(c, d, e, a, b, F1, KK3, 15, 8, x) - b, d = R(b, c, d, e, a, F1, KK3, 5, 6, x) - a, c = R(a, b, c, d, e, F1, KK3, 8, 4, x) - e, b = R(e, a, b, c, d, F1, KK3, 11, 1, x) - d, a = R(d, e, a, b, c, F1, KK3, 14, 3, x) - c, e = R(c, d, e, a, b, F1, KK3, 14, 11, x) - b, d = R(b, c, d, e, a, F1, KK3, 6, 15, x) - a, c = R(a, b, c, d, e, F1, KK3, 14, 0, x) - e, b = R(e, a, b, c, d, F1, KK3, 6, 5, x) - d, a = R(d, e, a, b, c, F1, KK3, 9, 12, x) - c, e = R(c, d, e, 
a, b, F1, KK3, 12, 2, x) - b, d = R(b, c, d, e, a, F1, KK3, 9, 13, x) - a, c = R(a, b, c, d, e, F1, KK3, 12, 9, x) - e, b = R(e, a, b, c, d, F1, KK3, 5, 7, x) - d, a = R(d, e, a, b, c, F1, KK3, 15, 10, x) - c, e = R(c, d, e, a, b, F1, KK3, 8, 14, x) # #63 - # Parallel round 5 - b, d = R(b, c, d, e, a, F0, KK4, 8, 12, x) - a, c = R(a, b, c, d, e, F0, KK4, 5, 15, x) - e, b = R(e, a, b, c, d, F0, KK4, 12, 10, x) - d, a = R(d, e, a, b, c, F0, KK4, 9, 4, x) - c, e = R(c, d, e, a, b, F0, KK4, 12, 1, x) - b, d = R(b, c, d, e, a, F0, KK4, 5, 5, x) - a, c = R(a, b, c, d, e, F0, KK4, 14, 8, x) - e, b = R(e, a, b, c, d, F0, KK4, 6, 7, x) - d, a = R(d, e, a, b, c, F0, KK4, 8, 6, x) - c, e = R(c, d, e, a, b, F0, KK4, 13, 2, x) - b, d = R(b, c, d, e, a, F0, KK4, 6, 13, x) - a, c = R(a, b, c, d, e, F0, KK4, 5, 14, x) - e, b = R(e, a, b, c, d, F0, KK4, 15, 0, x) - d, a = R(d, e, a, b, c, F0, KK4, 13, 3, x) - c, e = R(c, d, e, a, b, F0, KK4, 11, 9, x) - b, d = R(b, c, d, e, a, F0, KK4, 11, 11, x) # #79 - - t = (state[1] + cc + d) % 0x100000000 - state[1] = (state[2] + dd + e) % 0x100000000 - state[2] = (state[3] + ee + a) % 0x100000000 - state[3] = (state[4] + aa + b) % 0x100000000 - state[4] = (state[0] + bb + c) % 0x100000000 - state[0] = t % 0x100000000 - - -def RMD160Update(ctx, inp, inplen): - if type(inp) == str: - inp = [ord(i)&0xff for i in inp] - - have = int((ctx.count // 8) % 64) - inplen = int(inplen) - need = 64 - have - ctx.count += 8 * inplen - off = 0 - if inplen >= need: - if have: - for i in range(need): - ctx.buffer[have + i] = inp[i] - RMD160Transform(ctx.state, ctx.buffer) - off = need - have = 0 - while off + 64 <= inplen: - RMD160Transform(ctx.state, inp[off:]) #<--- - off += 64 - if off < inplen: - # memcpy(ctx->buffer + have, input+off, len-off) - for i in range(inplen - off): - ctx.buffer[have + i] = inp[off + i] - -def RMD160Final(ctx): - size = struct.pack("= self.n: - return self.jacobian_multiply(a, n % self.n, secret) - half = 
self.jacobian_multiply(a, n // 2, secret) - half_sq = self.jacobian_double(half) - if secret: - # A constant-time implementation - half_sq_a = self.jacobian_add(half_sq, a) - if n % 2 == 0: - result = half_sq - if n % 2 == 1: - result = half_sq_a - return result - else: - if n % 2 == 0: - return half_sq - return self.jacobian_add(half_sq, a) - - - def jacobian_shamir(self, a, n, b, m): - ab = self.jacobian_add(a, b) - if n < 0 or n >= self.n: - n %= self.n - if m < 0 or m >= self.n: - m %= self.n - res = 0, 0, 1 # point on infinity - for i in range(self.n_length - 1, -1, -1): - res = self.jacobian_double(res) - has_n = n & (1 << i) - has_m = m & (1 << i) - if has_n: - if has_m == 0: - res = self.jacobian_add(res, a) - if has_m != 0: - res = self.jacobian_add(res, ab) - else: - if has_m == 0: - res = self.jacobian_add(res, (0, 0, 1)) # Try not to leak - if has_m != 0: - res = self.jacobian_add(res, b) - return res - - - def fast_multiply(self, a, n, secret=False): - return self.from_jacobian(self.jacobian_multiply(self.to_jacobian(a), n, secret)) - - - def fast_add(self, a, b): - return self.from_jacobian(self.jacobian_add(self.to_jacobian(a), self.to_jacobian(b))) - - - def fast_shamir(self, a, n, b, m): - return self.from_jacobian(self.jacobian_shamir(self.to_jacobian(a), n, self.to_jacobian(b), m)) - - - def is_on_curve(self, a): - x, y = a - # Simple arithmetic check - if (pow(x, 3, self.p) + self.a * x + self.b) % self.p != y * y % self.p: - return False - # nP = point-at-infinity - return self.isinf(self.jacobian_multiply(self.to_jacobian(a), self.n)) diff --git a/src/lib/sslcrypto/fallback/_util.py b/src/lib/sslcrypto/fallback/_util.py deleted file mode 100644 index 2236ebee..00000000 --- a/src/lib/sslcrypto/fallback/_util.py +++ /dev/null @@ -1,79 +0,0 @@ -def int_to_bytes(raw, length): - data = [] - for _ in range(length): - data.append(raw % 256) - raw //= 256 - return bytes(data[::-1]) - - -def bytes_to_int(data): - raw = 0 - for byte in data: - raw = raw 
* 256 + byte - return raw - - -def legendre(a, p): - res = pow(a, (p - 1) // 2, p) - if res == p - 1: - return -1 - else: - return res - - -def inverse(a, n): - if a == 0: - return 0 - lm, hm = 1, 0 - low, high = a % n, n - while low > 1: - r = high // low - nm, new = hm - lm * r, high - low * r - lm, low, hm, high = nm, new, lm, low - return lm % n - - -def square_root_mod_prime(n, p): - if n == 0: - return 0 - if p == 2: - return n # We should never get here but it might be useful - if legendre(n, p) != 1: - raise ValueError("No square root") - # Optimizations - if p % 4 == 3: - return pow(n, (p + 1) // 4, p) - # 1. By factoring out powers of 2, find Q and S such that p - 1 = - # Q * 2 ** S with Q odd - q = p - 1 - s = 0 - while q % 2 == 0: - q //= 2 - s += 1 - # 2. Search for z in Z/pZ which is a quadratic non-residue - z = 1 - while legendre(z, p) != -1: - z += 1 - m, c, t, r = s, pow(z, q, p), pow(n, q, p), pow(n, (q + 1) // 2, p) - while True: - if t == 0: - return 0 - elif t == 1: - return r - # Use repeated squaring to find the least i, 0 < i < M, such - # that t ** (2 ** i) = 1 - t_sq = t - i = 0 - for i in range(1, m): - t_sq = t_sq * t_sq % p - if t_sq == 1: - break - else: - raise ValueError("Should never get here") - # Let b = c ** (2 ** (m - i - 1)) - b = pow(c, 2 ** (m - i - 1), p) - m = i - c = b * b % p - t = t * b * b % p - r = r * b % p - return r diff --git a/src/lib/sslcrypto/fallback/aes.py b/src/lib/sslcrypto/fallback/aes.py deleted file mode 100644 index e168bf34..00000000 --- a/src/lib/sslcrypto/fallback/aes.py +++ /dev/null @@ -1,101 +0,0 @@ -import os -import pyaes -from .._aes import AES - - -__all__ = ["aes"] - -class AESBackend: - def _get_algo_cipher_type(self, algo): - if not algo.startswith("aes-") or algo.count("-") != 2: - raise ValueError("Unknown cipher algorithm {}".format(algo)) - key_length, cipher_type = algo[4:].split("-") - if key_length not in ("128", "192", "256"): - raise ValueError("Unknown cipher algorithm 
{}".format(algo)) - if cipher_type not in ("cbc", "ctr", "cfb", "ofb"): - raise ValueError("Unknown cipher algorithm {}".format(algo)) - return cipher_type - - - def is_algo_supported(self, algo): - try: - self._get_algo_cipher_type(algo) - return True - except ValueError: - return False - - - def random(self, length): - return os.urandom(length) - - - def encrypt(self, data, key, algo="aes-256-cbc"): - cipher_type = self._get_algo_cipher_type(algo) - - # Generate random IV - iv = os.urandom(16) - - if cipher_type == "cbc": - cipher = pyaes.AESModeOfOperationCBC(key, iv=iv) - elif cipher_type == "ctr": - # The IV is actually a counter, not an IV but it does almost the - # same. Notice: pyaes always uses 1 as initial counter! Make sure - # not to call pyaes directly. - - # We kinda do two conversions here: from byte array to int here, and - # from int to byte array in pyaes internals. It's possible to fix that - # but I didn't notice any performance changes so I'm keeping clean code. - iv_int = 0 - for byte in iv: - iv_int = (iv_int * 256) + byte - counter = pyaes.Counter(iv_int) - cipher = pyaes.AESModeOfOperationCTR(key, counter=counter) - elif cipher_type == "cfb": - # Change segment size from default 8 bytes to 16 bytes for OpenSSL - # compatibility - cipher = pyaes.AESModeOfOperationCFB(key, iv, segment_size=16) - elif cipher_type == "ofb": - cipher = pyaes.AESModeOfOperationOFB(key, iv) - - encrypter = pyaes.Encrypter(cipher) - ciphertext = encrypter.feed(data) - ciphertext += encrypter.feed() - return ciphertext, iv - - - def decrypt(self, ciphertext, iv, key, algo="aes-256-cbc"): - cipher_type = self._get_algo_cipher_type(algo) - - if cipher_type == "cbc": - cipher = pyaes.AESModeOfOperationCBC(key, iv=iv) - elif cipher_type == "ctr": - # The IV is actually a counter, not an IV but it does almost the - # same. Notice: pyaes always uses 1 as initial counter! Make sure - # not to call pyaes directly. 
- - # We kinda do two conversions here: from byte array to int here, and - # from int to byte array in pyaes internals. It's possible to fix that - # but I didn't notice any performance changes so I'm keeping clean code. - iv_int = 0 - for byte in iv: - iv_int = (iv_int * 256) + byte - counter = pyaes.Counter(iv_int) - cipher = pyaes.AESModeOfOperationCTR(key, counter=counter) - elif cipher_type == "cfb": - # Change segment size from default 8 bytes to 16 bytes for OpenSSL - # compatibility - cipher = pyaes.AESModeOfOperationCFB(key, iv, segment_size=16) - elif cipher_type == "ofb": - cipher = pyaes.AESModeOfOperationOFB(key, iv) - - decrypter = pyaes.Decrypter(cipher) - data = decrypter.feed(ciphertext) - data += decrypter.feed() - return data - - - def get_backend(self): - return "fallback" - - -aes = AES(AESBackend()) diff --git a/src/lib/sslcrypto/fallback/ecc.py b/src/lib/sslcrypto/fallback/ecc.py deleted file mode 100644 index 6ca9a498..00000000 --- a/src/lib/sslcrypto/fallback/ecc.py +++ /dev/null @@ -1,199 +0,0 @@ -import hmac -import os -from ._jacobian import JacobianCurve -from .._ecc import ECC -from .aes import aes -from ._util import int_to_bytes, bytes_to_int, inverse, square_root_mod_prime - - -class EllipticCurveBackend: - def __init__(self, p, n, a, b, g): - self.p, self.n, self.a, self.b, self.g = p, n, a, b, g - self.jacobian = JacobianCurve(p, n, a, b, g) - - self.public_key_length = (len(bin(p).replace("0b", "")) + 7) // 8 - self.order_bitlength = len(bin(n).replace("0b", "")) - - - def _int_to_bytes(self, raw, len=None): - return int_to_bytes(raw, len or self.public_key_length) - - - def decompress_point(self, public_key): - # Parse & load data - x = bytes_to_int(public_key[1:]) - # Calculate Y - y_square = (pow(x, 3, self.p) + self.a * x + self.b) % self.p - try: - y = square_root_mod_prime(y_square, self.p) - except Exception: - raise ValueError("Invalid public key") from None - if y % 2 != public_key[0] - 0x02: - y = self.p - y - return 
self._int_to_bytes(x), self._int_to_bytes(y) - - - def new_private_key(self): - while True: - private_key = os.urandom(self.public_key_length) - if bytes_to_int(private_key) >= self.n: - continue - return private_key - - - def private_to_public(self, private_key): - raw = bytes_to_int(private_key) - x, y = self.jacobian.fast_multiply(self.g, raw) - return self._int_to_bytes(x), self._int_to_bytes(y) - - - def ecdh(self, private_key, public_key): - x, y = public_key - x, y = bytes_to_int(x), bytes_to_int(y) - private_key = bytes_to_int(private_key) - x, _ = self.jacobian.fast_multiply((x, y), private_key, secret=True) - return self._int_to_bytes(x) - - - def _subject_to_int(self, subject): - return bytes_to_int(subject[:(self.order_bitlength + 7) // 8]) - - - def sign(self, subject, raw_private_key, recoverable, is_compressed, entropy): - z = self._subject_to_int(subject) - private_key = bytes_to_int(raw_private_key) - k = bytes_to_int(entropy) - - # Fix k length to prevent Minerva. Increasing multiplier by a - # multiple of order doesn't break anything. 
This fix was ported - # from python-ecdsa - ks = k + self.n - kt = ks + self.n - ks_len = len(bin(ks).replace("0b", "")) // 8 - kt_len = len(bin(kt).replace("0b", "")) // 8 - if ks_len == kt_len: - k = kt - else: - k = ks - px, py = self.jacobian.fast_multiply(self.g, k, secret=True) - - r = px % self.n - if r == 0: - # Invalid k - raise ValueError("Invalid k") - - s = (inverse(k, self.n) * (z + (private_key * r))) % self.n - if s == 0: - # Invalid k - raise ValueError("Invalid k") - - inverted = False - if s * 2 >= self.n: - s = self.n - s - inverted = True - rs_buf = self._int_to_bytes(r) + self._int_to_bytes(s) - - if recoverable: - recid = (py % 2) ^ inverted - recid += 2 * int(px // self.n) - if is_compressed: - return bytes([31 + recid]) + rs_buf - else: - if recid >= 4: - raise ValueError("Too big recovery ID, use compressed address instead") - return bytes([27 + recid]) + rs_buf - else: - return rs_buf - - - def recover(self, signature, subject): - z = self._subject_to_int(subject) - - recid = signature[0] - 27 if signature[0] < 31 else signature[0] - 31 - r = bytes_to_int(signature[1:self.public_key_length + 1]) - s = bytes_to_int(signature[self.public_key_length + 1:]) - - # Verify bounds - if not 0 <= recid < 2 * (self.p // self.n + 1): - raise ValueError("Invalid recovery ID") - if r >= self.n: - raise ValueError("r is out of bounds") - if s >= self.n: - raise ValueError("s is out of bounds") - - rinv = inverse(r, self.n) - u1 = (-z * rinv) % self.n - u2 = (s * rinv) % self.n - - # Recover R - rx = r + (recid // 2) * self.n - if rx >= self.p: - raise ValueError("Rx is out of bounds") - - # Almost copied from decompress_point - ry_square = (pow(rx, 3, self.p) + self.a * rx + self.b) % self.p - try: - ry = square_root_mod_prime(ry_square, self.p) - except Exception: - raise ValueError("Invalid recovered public key") from None - - # Ensure the point is correct - if ry % 2 != recid % 2: - # Fix Ry sign - ry = self.p - ry - - x, y = 
self.jacobian.fast_shamir(self.g, u1, (rx, ry), u2) - return self._int_to_bytes(x), self._int_to_bytes(y) - - - def verify(self, signature, subject, public_key): - z = self._subject_to_int(subject) - - r = bytes_to_int(signature[:self.public_key_length]) - s = bytes_to_int(signature[self.public_key_length:]) - - # Verify bounds - if r >= self.n: - raise ValueError("r is out of bounds") - if s >= self.n: - raise ValueError("s is out of bounds") - - public_key = [bytes_to_int(c) for c in public_key] - - # Ensure that the public key is correct - if not self.jacobian.is_on_curve(public_key): - raise ValueError("Public key is not on curve") - - sinv = inverse(s, self.n) - u1 = (z * sinv) % self.n - u2 = (r * sinv) % self.n - - x1, _ = self.jacobian.fast_shamir(self.g, u1, public_key, u2) - if r != x1 % self.n: - raise ValueError("Invalid signature") - - return True - - - def derive_child(self, seed, child): - # Round 1 - h = hmac.new(key=b"Bitcoin seed", msg=seed, digestmod="sha512").digest() - private_key1 = h[:32] - x, y = self.private_to_public(private_key1) - public_key1 = bytes([0x02 + (y[-1] % 2)]) + x - private_key1 = bytes_to_int(private_key1) - - # Round 2 - msg = public_key1 + self._int_to_bytes(child, 4) - h = hmac.new(key=h[32:], msg=msg, digestmod="sha512").digest() - private_key2 = bytes_to_int(h[:32]) - - return self._int_to_bytes((private_key1 + private_key2) % self.n) - - - @classmethod - def get_backend(cls): - return "fallback" - - -ecc = ECC(EllipticCurveBackend, aes) diff --git a/src/lib/sslcrypto/fallback/rsa.py b/src/lib/sslcrypto/fallback/rsa.py deleted file mode 100644 index 54b8d2cb..00000000 --- a/src/lib/sslcrypto/fallback/rsa.py +++ /dev/null @@ -1,8 +0,0 @@ -# pylint: disable=too-few-public-methods - -class RSA: - def get_backend(self): - return "fallback" - - -rsa = RSA() diff --git a/src/lib/sslcrypto/openssl/__init__.py b/src/lib/sslcrypto/openssl/__init__.py deleted file mode 100644 index a32ae692..00000000 --- 
a/src/lib/sslcrypto/openssl/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .aes import aes -from .ecc import ecc -from .rsa import rsa diff --git a/src/lib/sslcrypto/openssl/aes.py b/src/lib/sslcrypto/openssl/aes.py deleted file mode 100644 index c58451d5..00000000 --- a/src/lib/sslcrypto/openssl/aes.py +++ /dev/null @@ -1,156 +0,0 @@ -import ctypes -import threading -from .._aes import AES -from ..fallback.aes import aes as fallback_aes -from .library import lib, openssl_backend - - -# Initialize functions -try: - lib.EVP_CIPHER_CTX_new.restype = ctypes.POINTER(ctypes.c_char) -except AttributeError: - pass -lib.EVP_get_cipherbyname.restype = ctypes.POINTER(ctypes.c_char) - - -thread_local = threading.local() - - -class Context: - def __init__(self, ptr, do_free): - self.lib = lib - self.ptr = ptr - self.do_free = do_free - - - def __del__(self): - if self.do_free: - self.lib.EVP_CIPHER_CTX_free(self.ptr) - - -class AESBackend: - ALGOS = ( - "aes-128-cbc", "aes-192-cbc", "aes-256-cbc", - "aes-128-ctr", "aes-192-ctr", "aes-256-ctr", - "aes-128-cfb", "aes-192-cfb", "aes-256-cfb", - "aes-128-ofb", "aes-192-ofb", "aes-256-ofb" - ) - - def __init__(self): - self.is_supported_ctx_new = hasattr(lib, "EVP_CIPHER_CTX_new") - self.is_supported_ctx_reset = hasattr(lib, "EVP_CIPHER_CTX_reset") - - - def _get_ctx(self): - if not hasattr(thread_local, "ctx"): - if self.is_supported_ctx_new: - thread_local.ctx = Context(lib.EVP_CIPHER_CTX_new(), True) - else: - # 1 KiB ought to be enough for everybody. 
We don't know the real - # size of the context buffer because we are unsure about padding and - # pointer size - thread_local.ctx = Context(ctypes.create_string_buffer(1024), False) - return thread_local.ctx.ptr - - - def get_backend(self): - return openssl_backend - - - def _get_cipher(self, algo): - if algo not in self.ALGOS: - raise ValueError("Unknown cipher algorithm {}".format(algo)) - cipher = lib.EVP_get_cipherbyname(algo.encode()) - if not cipher: - raise ValueError("Unknown cipher algorithm {}".format(algo)) - return cipher - - - def is_algo_supported(self, algo): - try: - self._get_cipher(algo) - return True - except ValueError: - return False - - - def random(self, length): - entropy = ctypes.create_string_buffer(length) - lib.RAND_bytes(entropy, length) - return bytes(entropy) - - - def encrypt(self, data, key, algo="aes-256-cbc"): - # Initialize context - ctx = self._get_ctx() - if not self.is_supported_ctx_new: - lib.EVP_CIPHER_CTX_init(ctx) - try: - lib.EVP_EncryptInit_ex(ctx, self._get_cipher(algo), None, None, None) - - # Generate random IV - iv_length = 16 - iv = self.random(iv_length) - - # Set key and IV - lib.EVP_EncryptInit_ex(ctx, None, None, key, iv) - - # Actually encrypt - block_size = 16 - output = ctypes.create_string_buffer((len(data) // block_size + 1) * block_size) - output_len = ctypes.c_int() - - if not lib.EVP_CipherUpdate(ctx, output, ctypes.byref(output_len), data, len(data)): - raise ValueError("Could not feed cipher with data") - - new_output = ctypes.byref(output, output_len.value) - output_len2 = ctypes.c_int() - if not lib.EVP_CipherFinal_ex(ctx, new_output, ctypes.byref(output_len2)): - raise ValueError("Could not finalize cipher") - - ciphertext = output[:output_len.value + output_len2.value] - return ciphertext, iv - finally: - if self.is_supported_ctx_reset: - lib.EVP_CIPHER_CTX_reset(ctx) - else: - lib.EVP_CIPHER_CTX_cleanup(ctx) - - - def decrypt(self, ciphertext, iv, key, algo="aes-256-cbc"): - # Initialize context - 
ctx = self._get_ctx() - if not self.is_supported_ctx_new: - lib.EVP_CIPHER_CTX_init(ctx) - try: - lib.EVP_DecryptInit_ex(ctx, self._get_cipher(algo), None, None, None) - - # Make sure IV length is correct - iv_length = 16 - if len(iv) != iv_length: - raise ValueError("Expected IV to be {} bytes, got {} bytes".format(iv_length, len(iv))) - - # Set key and IV - lib.EVP_DecryptInit_ex(ctx, None, None, key, iv) - - # Actually decrypt - output = ctypes.create_string_buffer(len(ciphertext)) - output_len = ctypes.c_int() - - if not lib.EVP_DecryptUpdate(ctx, output, ctypes.byref(output_len), ciphertext, len(ciphertext)): - raise ValueError("Could not feed decipher with ciphertext") - - new_output = ctypes.byref(output, output_len.value) - output_len2 = ctypes.c_int() - if not lib.EVP_DecryptFinal_ex(ctx, new_output, ctypes.byref(output_len2)): - raise ValueError("Could not finalize decipher") - - return output[:output_len.value + output_len2.value] - finally: - if self.is_supported_ctx_reset: - lib.EVP_CIPHER_CTX_reset(ctx) - else: - lib.EVP_CIPHER_CTX_cleanup(ctx) - - -aes = AES(AESBackend(), fallback_aes) diff --git a/src/lib/sslcrypto/openssl/discovery.py b/src/lib/sslcrypto/openssl/discovery.py deleted file mode 100644 index 0ebb0299..00000000 --- a/src/lib/sslcrypto/openssl/discovery.py +++ /dev/null @@ -1,3 +0,0 @@ -# Can be redefined by user -def discover(): - pass \ No newline at end of file diff --git a/src/lib/sslcrypto/openssl/ecc.py b/src/lib/sslcrypto/openssl/ecc.py deleted file mode 100644 index c667be8a..00000000 --- a/src/lib/sslcrypto/openssl/ecc.py +++ /dev/null @@ -1,583 +0,0 @@ -import ctypes -import hmac -import threading -from .._ecc import ECC -from .aes import aes -from .library import lib, openssl_backend - - -# Initialize functions -lib.BN_new.restype = ctypes.POINTER(ctypes.c_char) -lib.BN_bin2bn.restype = ctypes.POINTER(ctypes.c_char) -lib.BN_CTX_new.restype = ctypes.POINTER(ctypes.c_char) -lib.EC_GROUP_new_curve_GFp.restype = 
ctypes.POINTER(ctypes.c_char) -lib.EC_KEY_new.restype = ctypes.POINTER(ctypes.c_char) -lib.EC_POINT_new.restype = ctypes.POINTER(ctypes.c_char) -lib.EC_KEY_get0_private_key.restype = ctypes.POINTER(ctypes.c_char) -lib.EVP_PKEY_new.restype = ctypes.POINTER(ctypes.c_char) -try: - lib.EVP_PKEY_CTX_new.restype = ctypes.POINTER(ctypes.c_char) -except AttributeError: - pass - - -thread_local = threading.local() - - -# This lock is required to keep ECC thread-safe. Old OpenSSL versions (before -# 1.1.0) use global objects so they aren't thread safe. Fortunately we can check -# the code to find out which functions are thread safe. -# -# For example, EC_GROUP_new_curve_GFp checks global error code to initialize -# the group, so if two errors happen at once or two threads read the error code, -# or the codes are read in the wrong order, the group is initialized in a wrong -# way. -# -# EC_KEY_new_by_curve_name calls EC_GROUP_new_curve_GFp so it's not thread -# safe. We can't use the lock because it would be too slow; instead, we use -# EC_KEY_new and then EC_KEY_set_group which calls EC_GROUP_copy instead which -# is thread safe. 
-lock = threading.Lock() - - -class BN: - # BN_CTX - class Context: - def __init__(self): - self.ptr = lib.BN_CTX_new() - self.lib = lib # For finalizer - - - def __del__(self): - self.lib.BN_CTX_free(self.ptr) - - - @classmethod - def get(cls): - # Get thread-safe contexf - if not hasattr(thread_local, "bn_ctx"): - thread_local.bn_ctx = cls() - return thread_local.bn_ctx.ptr - - - def __init__(self, value=None, link_only=False): - if link_only: - self.bn = value - self._free = False - else: - if value is None: - self.bn = lib.BN_new() - self._free = True - elif isinstance(value, int) and value < 256: - self.bn = lib.BN_new() - lib.BN_clear(self.bn) - lib.BN_add_word(self.bn, value) - self._free = True - else: - if isinstance(value, int): - value = value.to_bytes(128, "big") - self.bn = lib.BN_bin2bn(value, len(value), None) - self._free = True - - - def __del__(self): - if self._free: - lib.BN_free(self.bn) - - - def bytes(self, length=None): - buf = ctypes.create_string_buffer((len(self) + 7) // 8) - lib.BN_bn2bin(self.bn, buf) - buf = bytes(buf) - if length is None: - return buf - else: - if length < len(buf): - raise ValueError("Too little space for BN") - return b"\x00" * (length - len(buf)) + buf - - def __int__(self): - value = 0 - for byte in self.bytes(): - value = value * 256 + byte - return value - - def __len__(self): - return lib.BN_num_bits(self.bn) - - - def inverse(self, modulo): - result = BN() - if not lib.BN_mod_inverse(result.bn, self.bn, modulo.bn, BN.Context.get()): - raise ValueError("Could not compute inverse") - return result - - - def __floordiv__(self, other): - if not isinstance(other, BN): - raise TypeError("Can only divide BN by BN, not {}".format(other)) - result = BN() - if not lib.BN_div(result.bn, None, self.bn, other.bn, BN.Context.get()): - raise ZeroDivisionError("Division by zero") - return result - - def __mod__(self, other): - if not isinstance(other, BN): - raise TypeError("Can only divide BN by BN, not {}".format(other)) - 
result = BN() - if not lib.BN_div(None, result.bn, self.bn, other.bn, BN.Context.get()): - raise ZeroDivisionError("Division by zero") - return result - - def __add__(self, other): - if not isinstance(other, BN): - raise TypeError("Can only sum BN's, not BN and {}".format(other)) - result = BN() - if not lib.BN_add(result.bn, self.bn, other.bn): - raise ValueError("Could not sum two BN's") - return result - - def __sub__(self, other): - if not isinstance(other, BN): - raise TypeError("Can only subtract BN's, not BN and {}".format(other)) - result = BN() - if not lib.BN_sub(result.bn, self.bn, other.bn): - raise ValueError("Could not subtract BN from BN") - return result - - def __mul__(self, other): - if not isinstance(other, BN): - raise TypeError("Can only multiply BN by BN, not {}".format(other)) - result = BN() - if not lib.BN_mul(result.bn, self.bn, other.bn, BN.Context.get()): - raise ValueError("Could not multiply two BN's") - return result - - def __neg__(self): - return BN(0) - self - - - # A dirty but nice way to update current BN and free old BN at the same time - def __imod__(self, other): - res = self % other - self.bn, res.bn = res.bn, self.bn - return self - def __iadd__(self, other): - res = self + other - self.bn, res.bn = res.bn, self.bn - return self - def __isub__(self, other): - res = self - other - self.bn, res.bn = res.bn, self.bn - return self - def __imul__(self, other): - res = self * other - self.bn, res.bn = res.bn, self.bn - return self - - - def cmp(self, other): - if not isinstance(other, BN): - raise TypeError("Can only compare BN with BN, not {}".format(other)) - return lib.BN_cmp(self.bn, other.bn) - - def __eq__(self, other): - return self.cmp(other) == 0 - def __lt__(self, other): - return self.cmp(other) < 0 - def __gt__(self, other): - return self.cmp(other) > 0 - def __ne__(self, other): - return self.cmp(other) != 0 - def __le__(self, other): - return self.cmp(other) <= 0 - def __ge__(self, other): - return self.cmp(other) >= 
0 - - - def __repr__(self): - return "".format(int(self)) - - def __str__(self): - return str(int(self)) - - -class EllipticCurveBackend: - def __init__(self, p, n, a, b, g): - bn_ctx = BN.Context.get() - - self.lib = lib # For finalizer - - self.p = BN(p) - self.order = BN(n) - self.a = BN(a) - self.b = BN(b) - self.h = BN((p + n // 2) // n) - - with lock: - # Thread-safety - self.group = lib.EC_GROUP_new_curve_GFp(self.p.bn, self.a.bn, self.b.bn, bn_ctx) - if not self.group: - raise ValueError("Could not create group object") - generator = self._public_key_to_point(g) - lib.EC_GROUP_set_generator(self.group, generator, self.order.bn, self.h.bn) - if not self.group: - raise ValueError("The curve is not supported by OpenSSL") - - self.public_key_length = (len(self.p) + 7) // 8 - - self.is_supported_evp_pkey_ctx = hasattr(lib, "EVP_PKEY_CTX_new") - - - def __del__(self): - self.lib.EC_GROUP_free(self.group) - - - def _private_key_to_ec_key(self, private_key): - # Thread-safety - eckey = lib.EC_KEY_new() - lib.EC_KEY_set_group(eckey, self.group) - if not eckey: - raise ValueError("Failed to allocate EC_KEY") - private_key = BN(private_key) - if not lib.EC_KEY_set_private_key(eckey, private_key.bn): - lib.EC_KEY_free(eckey) - raise ValueError("Invalid private key") - return eckey, private_key - - - def _public_key_to_point(self, public_key): - x = BN(public_key[0]) - y = BN(public_key[1]) - # EC_KEY_set_public_key_affine_coordinates is not supported by - # OpenSSL 1.0.0 so we can't use it - point = lib.EC_POINT_new(self.group) - if not lib.EC_POINT_set_affine_coordinates_GFp(self.group, point, x.bn, y.bn, BN.Context.get()): - raise ValueError("Could not set public key affine coordinates") - return point - - - def _public_key_to_ec_key(self, public_key): - # Thread-safety - eckey = lib.EC_KEY_new() - lib.EC_KEY_set_group(eckey, self.group) - if not eckey: - raise ValueError("Failed to allocate EC_KEY") - try: - # EC_KEY_set_public_key_affine_coordinates is not 
supported by - # OpenSSL 1.0.0 so we can't use it - point = self._public_key_to_point(public_key) - if not lib.EC_KEY_set_public_key(eckey, point): - raise ValueError("Could not set point") - lib.EC_POINT_free(point) - return eckey - except Exception as e: - lib.EC_KEY_free(eckey) - raise e from None - - - def _point_to_affine(self, point): - # Convert to affine coordinates - x = BN() - y = BN() - if lib.EC_POINT_get_affine_coordinates_GFp(self.group, point, x.bn, y.bn, BN.Context.get()) != 1: - raise ValueError("Failed to convert public key to affine coordinates") - # Convert to binary - if (len(x) + 7) // 8 > self.public_key_length: - raise ValueError("Public key X coordinate is too large") - if (len(y) + 7) // 8 > self.public_key_length: - raise ValueError("Public key Y coordinate is too large") - return x.bytes(self.public_key_length), y.bytes(self.public_key_length) - - - def decompress_point(self, public_key): - point = lib.EC_POINT_new(self.group) - if not point: - raise ValueError("Could not create point") - try: - if not lib.EC_POINT_oct2point(self.group, point, public_key, len(public_key), BN.Context.get()): - raise ValueError("Invalid compressed public key") - return self._point_to_affine(point) - finally: - lib.EC_POINT_free(point) - - - def new_private_key(self): - # Create random key - # Thread-safety - eckey = lib.EC_KEY_new() - lib.EC_KEY_set_group(eckey, self.group) - lib.EC_KEY_generate_key(eckey) - # To big integer - private_key = BN(lib.EC_KEY_get0_private_key(eckey), link_only=True) - # To binary - private_key_buf = private_key.bytes(self.public_key_length) - # Cleanup - lib.EC_KEY_free(eckey) - return private_key_buf - - - def private_to_public(self, private_key): - eckey, private_key = self._private_key_to_ec_key(private_key) - try: - # Derive public key - point = lib.EC_POINT_new(self.group) - try: - if not lib.EC_POINT_mul(self.group, point, private_key.bn, None, None, BN.Context.get()): - raise ValueError("Failed to derive public key") - 
return self._point_to_affine(point) - finally: - lib.EC_POINT_free(point) - finally: - lib.EC_KEY_free(eckey) - - - def ecdh(self, private_key, public_key): - if not self.is_supported_evp_pkey_ctx: - # Use ECDH_compute_key instead - # Create EC_KEY from private key - eckey, _ = self._private_key_to_ec_key(private_key) - try: - # Create EC_POINT from public key - point = self._public_key_to_point(public_key) - try: - key = ctypes.create_string_buffer(self.public_key_length) - if lib.ECDH_compute_key(key, self.public_key_length, point, eckey, None) == -1: - raise ValueError("Could not compute shared secret") - return bytes(key) - finally: - lib.EC_POINT_free(point) - finally: - lib.EC_KEY_free(eckey) - - # Private key: - # Create EC_KEY - eckey, _ = self._private_key_to_ec_key(private_key) - try: - # Convert to EVP_PKEY - pkey = lib.EVP_PKEY_new() - if not pkey: - raise ValueError("Could not create private key object") - try: - lib.EVP_PKEY_set1_EC_KEY(pkey, eckey) - - # Public key: - # Create EC_KEY - peer_eckey = self._public_key_to_ec_key(public_key) - try: - # Convert to EVP_PKEY - peer_pkey = lib.EVP_PKEY_new() - if not peer_pkey: - raise ValueError("Could not create public key object") - try: - lib.EVP_PKEY_set1_EC_KEY(peer_pkey, peer_eckey) - - # Create context - ctx = lib.EVP_PKEY_CTX_new(pkey, None) - if not ctx: - raise ValueError("Could not create EVP context") - try: - if lib.EVP_PKEY_derive_init(ctx) != 1: - raise ValueError("Could not initialize key derivation") - if not lib.EVP_PKEY_derive_set_peer(ctx, peer_pkey): - raise ValueError("Could not set peer") - - # Actually derive - key_len = ctypes.c_int(0) - lib.EVP_PKEY_derive(ctx, None, ctypes.byref(key_len)) - key = ctypes.create_string_buffer(key_len.value) - lib.EVP_PKEY_derive(ctx, key, ctypes.byref(key_len)) - - return bytes(key) - finally: - lib.EVP_PKEY_CTX_free(ctx) - finally: - lib.EVP_PKEY_free(peer_pkey) - finally: - lib.EC_KEY_free(peer_eckey) - finally: - lib.EVP_PKEY_free(pkey) - finally: 
- lib.EC_KEY_free(eckey) - - - def _subject_to_bn(self, subject): - return BN(subject[:(len(self.order) + 7) // 8]) - - - def sign(self, subject, private_key, recoverable, is_compressed, entropy): - z = self._subject_to_bn(subject) - private_key = BN(private_key) - k = BN(entropy) - - rp = lib.EC_POINT_new(self.group) - bn_ctx = BN.Context.get() - try: - # Fix Minerva - k1 = k + self.order - k2 = k1 + self.order - if len(k1) == len(k2): - k = k2 - else: - k = k1 - if not lib.EC_POINT_mul(self.group, rp, k.bn, None, None, bn_ctx): - raise ValueError("Could not generate R") - # Convert to affine coordinates - rx = BN() - ry = BN() - if lib.EC_POINT_get_affine_coordinates_GFp(self.group, rp, rx.bn, ry.bn, bn_ctx) != 1: - raise ValueError("Failed to convert R to affine coordinates") - r = rx % self.order - if r == BN(0): - raise ValueError("Invalid k") - # Calculate s = k^-1 * (z + r * private_key) mod n - s = (k.inverse(self.order) * (z + r * private_key)) % self.order - if s == BN(0): - raise ValueError("Invalid k") - - inverted = False - if s * BN(2) >= self.order: - s = self.order - s - inverted = True - - r_buf = r.bytes(self.public_key_length) - s_buf = s.bytes(self.public_key_length) - if recoverable: - # Generate recid - recid = int(ry % BN(2)) ^ inverted - # The line below is highly unlikely to matter in case of - # secp256k1 but might make sense for other curves - recid += 2 * int(rx // self.order) - if is_compressed: - return bytes([31 + recid]) + r_buf + s_buf - else: - if recid >= 4: - raise ValueError("Too big recovery ID, use compressed address instead") - return bytes([27 + recid]) + r_buf + s_buf - else: - return r_buf + s_buf - finally: - lib.EC_POINT_free(rp) - - - def recover(self, signature, subject): - recid = signature[0] - 27 if signature[0] < 31 else signature[0] - 31 - r = BN(signature[1:self.public_key_length + 1]) - s = BN(signature[self.public_key_length + 1:]) - - # Verify bounds - if r >= self.order: - raise ValueError("r is out of 
bounds") - if s >= self.order: - raise ValueError("s is out of bounds") - - bn_ctx = BN.Context.get() - - z = self._subject_to_bn(subject) - - rinv = r.inverse(self.order) - u1 = (-z * rinv) % self.order - u2 = (s * rinv) % self.order - - # Recover R - rx = r + BN(recid // 2) * self.order - if rx >= self.p: - raise ValueError("Rx is out of bounds") - rp = lib.EC_POINT_new(self.group) - if not rp: - raise ValueError("Could not create R") - try: - init_buf = b"\x02" + rx.bytes(self.public_key_length) - if not lib.EC_POINT_oct2point(self.group, rp, init_buf, len(init_buf), bn_ctx): - raise ValueError("Could not use Rx to initialize point") - ry = BN() - if lib.EC_POINT_get_affine_coordinates_GFp(self.group, rp, None, ry.bn, bn_ctx) != 1: - raise ValueError("Failed to convert R to affine coordinates") - if int(ry % BN(2)) != recid % 2: - # Fix Ry sign - ry = self.p - ry - if lib.EC_POINT_set_affine_coordinates_GFp(self.group, rp, rx.bn, ry.bn, bn_ctx) != 1: - raise ValueError("Failed to update R coordinates") - - # Recover public key - result = lib.EC_POINT_new(self.group) - if not result: - raise ValueError("Could not create point") - try: - if not lib.EC_POINT_mul(self.group, result, u1.bn, rp, u2.bn, bn_ctx): - raise ValueError("Could not recover public key") - return self._point_to_affine(result) - finally: - lib.EC_POINT_free(result) - finally: - lib.EC_POINT_free(rp) - - - def verify(self, signature, subject, public_key): - r_raw = signature[:self.public_key_length] - r = BN(r_raw) - s = BN(signature[self.public_key_length:]) - if r >= self.order: - raise ValueError("r is out of bounds") - if s >= self.order: - raise ValueError("s is out of bounds") - - bn_ctx = BN.Context.get() - - z = self._subject_to_bn(subject) - - pub_p = lib.EC_POINT_new(self.group) - if not pub_p: - raise ValueError("Could not create public key point") - try: - init_buf = b"\x04" + public_key[0] + public_key[1] - if not lib.EC_POINT_oct2point(self.group, pub_p, init_buf, len(init_buf), 
bn_ctx): - raise ValueError("Could initialize point") - - sinv = s.inverse(self.order) - u1 = (z * sinv) % self.order - u2 = (r * sinv) % self.order - - # Recover public key - result = lib.EC_POINT_new(self.group) - if not result: - raise ValueError("Could not create point") - try: - if not lib.EC_POINT_mul(self.group, result, u1.bn, pub_p, u2.bn, bn_ctx): - raise ValueError("Could not recover public key") - if BN(self._point_to_affine(result)[0]) % self.order != r: - raise ValueError("Invalid signature") - return True - finally: - lib.EC_POINT_free(result) - finally: - lib.EC_POINT_free(pub_p) - - - def derive_child(self, seed, child): - # Round 1 - h = hmac.new(key=b"Bitcoin seed", msg=seed, digestmod="sha512").digest() - private_key1 = h[:32] - x, y = self.private_to_public(private_key1) - public_key1 = bytes([0x02 + (y[-1] % 2)]) + x - private_key1 = BN(private_key1) - - # Round 2 - child_bytes = [] - for _ in range(4): - child_bytes.append(child & 255) - child >>= 8 - child_bytes = bytes(child_bytes[::-1]) - msg = public_key1 + child_bytes - h = hmac.new(key=h[32:], msg=msg, digestmod="sha512").digest() - private_key2 = BN(h[:32]) - - return ((private_key1 + private_key2) % self.order).bytes(self.public_key_length) - - - @classmethod - def get_backend(cls): - return openssl_backend - - -ecc = ECC(EllipticCurveBackend, aes) diff --git a/src/lib/sslcrypto/openssl/library.py b/src/lib/sslcrypto/openssl/library.py deleted file mode 100644 index 47bedc3a..00000000 --- a/src/lib/sslcrypto/openssl/library.py +++ /dev/null @@ -1,98 +0,0 @@ -import os -import sys -import ctypes -import ctypes.util -from .discovery import discover as user_discover - - -# Disable false-positive _MEIPASS -# pylint: disable=no-member,protected-access - -# Discover OpenSSL library -def discover_paths(): - # Search local files first - if "win" in sys.platform: - # Windows - names = [ - "libeay32.dll" - ] - openssl_paths = [os.path.abspath(path) for path in names] - if hasattr(sys, 
"_MEIPASS"): - openssl_paths += [os.path.join(sys._MEIPASS, path) for path in openssl_paths] - openssl_paths.append(ctypes.util.find_library("libeay32")) - elif "darwin" in sys.platform: - # Mac OS - names = [ - "libcrypto.dylib", - "libcrypto.1.1.0.dylib", - "libcrypto.1.0.2.dylib", - "libcrypto.1.0.1.dylib", - "libcrypto.1.0.0.dylib", - "libcrypto.0.9.8.dylib" - ] - openssl_paths = [os.path.abspath(path) for path in names] - openssl_paths += names - openssl_paths += [ - "/usr/local/opt/openssl/lib/libcrypto.dylib" - ] - if hasattr(sys, "_MEIPASS") and "RESOURCEPATH" in os.environ: - openssl_paths += [ - os.path.join(os.environ["RESOURCEPATH"], "..", "Frameworks", name) - for name in names - ] - openssl_paths.append(ctypes.util.find_library("ssl")) - else: - # Linux, BSD and such - names = [ - "libcrypto.so", - "libssl.so", - "libcrypto.so.1.1.0", - "libssl.so.1.1.0", - "libcrypto.so.1.0.2", - "libssl.so.1.0.2", - "libcrypto.so.1.0.1", - "libssl.so.1.0.1", - "libcrypto.so.1.0.0", - "libssl.so.1.0.0", - "libcrypto.so.0.9.8", - "libssl.so.0.9.8" - ] - openssl_paths = [os.path.abspath(path) for path in names] - openssl_paths += names - if hasattr(sys, "_MEIPASS"): - openssl_paths += [os.path.join(sys._MEIPASS, path) for path in names] - openssl_paths.append(ctypes.util.find_library("ssl")) - lst = user_discover() - if isinstance(lst, str): - lst = [lst] - elif not lst: - lst = [] - return lst + openssl_paths - - -def discover_library(): - for path in discover_paths(): - if path: - try: - return ctypes.CDLL(path) - except OSError: - pass - raise OSError("OpenSSL is unavailable") - - -lib = discover_library() - -# Initialize internal state -try: - lib.OPENSSL_add_all_algorithms_conf() -except AttributeError: - pass - -try: - lib.OpenSSL_version.restype = ctypes.c_char_p - openssl_backend = lib.OpenSSL_version(0).decode() -except AttributeError: - lib.SSLeay_version.restype = ctypes.c_char_p - openssl_backend = lib.SSLeay_version(0).decode() - -openssl_backend += " at " 
+ lib._name diff --git a/src/lib/sslcrypto/openssl/rsa.py b/src/lib/sslcrypto/openssl/rsa.py deleted file mode 100644 index afd8b51c..00000000 --- a/src/lib/sslcrypto/openssl/rsa.py +++ /dev/null @@ -1,11 +0,0 @@ -# pylint: disable=too-few-public-methods - -from .library import openssl_backend - - -class RSA: - def get_backend(self): - return openssl_backend - - -rsa = RSA() diff --git a/src/lib/subtl/subtl.py b/src/lib/subtl/subtl.py index cd8c5b2c..e98ac69e 100644 --- a/src/lib/subtl/subtl.py +++ b/src/lib/subtl/subtl.py @@ -1,7 +1,6 @@ ''' Based on the specification at http://bittorrent.org/beps/bep_0015.html ''' -import binascii import random import struct import time @@ -17,6 +16,19 @@ SCRAPE = 2 ERROR = 3 +def norm_info_hash(info_hash): + if len(info_hash) == 40: + info_hash = info_hash.decode('hex') + if len(info_hash) != 20: + raise UdpTrackerClientException( + 'info_hash length is not 20: {}'.format(len(info_hash))) + return info_hash + + +def info_hash_to_str(info_hash): + return binascii.hexlify(info_hash) + + class UdpTrackerClientException(Exception): pass @@ -59,10 +71,9 @@ class UdpTrackerClient: self._check_fields(args, fields) # Humans tend to use hex representations of the hash. Wasteful humans. 
- args['info_hash'] = args['info_hash'] + args['info_hash'] = norm_info_hash(args['info_hash']) values = [args[a] for a in fields.split()] - values[1] = values[1].encode("utf8") payload = struct.pack('!20s20sQQQLLLLH', *values) return self._send(ANNOUNCE, payload) @@ -72,6 +83,7 @@ class UdpTrackerClient: payload = '' for info_hash in info_hash_list: + info_hash = norm_info_hash(info_hash) payload += info_hash trans = self._send(SCRAPE, payload) @@ -99,11 +111,11 @@ class UdpTrackerClient: return trans def error(self, message): - raise Exception('error: {}'.format(message)) + print('error: {}'.format(message)) def _send(self, action, payload=None): if not payload: - payload = b'' + payload = '' trans_id, header = self._request_header(action) self.transactions[trans_id] = trans = { 'action': action, @@ -111,8 +123,7 @@ class UdpTrackerClient: 'payload': payload, 'completed': False, } - self.sock.connect((self.host, self.port)) - self.sock.send(header + payload) + self.sock.sendto(header + payload, (self.host, self.port)) return trans def _request_header(self, action): @@ -127,7 +138,7 @@ class UdpTrackerClient: elif action == SCRAPE: return self._process_scrape(payload, trans) elif action == ERROR: - return self._process_error(payload, trans) + return self._proecss_error(payload, trans) else: raise UdpTrackerClientException( 'Unknown action response: {}'.format(action)) @@ -147,10 +158,10 @@ class UdpTrackerClient: peer_data = payload[info_size:] peer_struct = '!LH' peer_size = struct.calcsize(peer_struct) - peer_count = int(len(peer_data) / peer_size) + peer_count = len(peer_data) / peer_size peers = [] - for peer_offset in range(peer_count): + for peer_offset in xrange(peer_count): off = peer_size * peer_offset peer = peer_data[off:off + peer_size] addr, port = struct.unpack(peer_struct, peer) @@ -172,7 +183,7 @@ class UdpTrackerClient: info_count = len(payload) / info_size hashes = trans['sent_hashes'] response = {} - for info_offset in range(info_count): + for 
info_offset in xrange(info_count): off = info_size * info_offset info = payload[off:off + info_size] seeders, completed, leechers = struct.unpack(info_struct, info) @@ -189,13 +200,13 @@ class UdpTrackerClient: it here for the possibility. ''' self.error(payload) - return False + return payload def _generate_peer_id(self): '''http://www.bittorrent.org/beps/bep_0020.html''' peer_id = '-PU' + __version__.replace('.', '-') + '-' remaining = 20 - len(peer_id) - numbers = [str(random.randint(0, 9)) for _ in range(remaining)] + numbers = [str(random.randint(0, 9)) for _ in xrange(remaining)] peer_id += ''.join(numbers) assert(len(peer_id) == 20) return peer_id diff --git a/src/main.py b/src/main.py index ec90f4d9..98cde932 100644 --- a/src/main.py +++ b/src/main.py @@ -1,29 +1,24 @@ # Included modules import os import sys -import stat import time import logging -startup_errors = [] -def startupError(msg): - startup_errors.append(msg) - print("Startup error: %s" % msg) - # Third party modules import gevent -if gevent.version_info.major <= 1: # Workaround for random crash when libuv used with threads - try: - if "libev" not in str(gevent.config.loop): - gevent.config.loop = "libev-cext" - except Exception as err: - startupError("Unable to switch gevent loop to libev: %s" % err) +from gevent import monkey +import ssl +# Fix PROTOCOL_SSLv3 not defined +if "PROTOCOL_SSLv3" not in dir(ssl): + ssl.PROTOCOL_SSLv3 = ssl.PROTOCOL_SSLv23 -import gevent.monkey -gevent.monkey.patch_all(thread=False, subprocess=False) +if "patch_subprocess" in dir(monkey): + monkey.patch_all(thread=False, subprocess=False) +else: + monkey.patch_all(thread=False) +# Not thread: pyfilesystem and systray icon, Not subprocess: Gevent 1.1+ update_after_shutdown = False # If set True then update and restart zeronet after main loop ended -restart_after_shutdown = False # If set True then restart zeronet after main loop ended # Load config from Config import config @@ -31,44 +26,44 @@ 
config.parse(silent=True) # Plugins need to access the configuration if not config.arguments: # Config parse failed, show the help screen and exit config.parse() +# Create necessary files and dirs +if not os.path.isdir(config.log_dir): + os.mkdir(config.log_dir) if not os.path.isdir(config.data_dir): os.mkdir(config.data_dir) - try: - os.chmod(config.data_dir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) - except Exception as err: - startupError("Can't change permission of %s: %s" % (config.data_dir, err)) - if not os.path.isfile("%s/sites.json" % config.data_dir): open("%s/sites.json" % config.data_dir, "w").write("{}") if not os.path.isfile("%s/users.json" % config.data_dir): open("%s/users.json" % config.data_dir, "w").write("{}") +# Setup logging if config.action == "main": - from util import helper - try: - lock = helper.openLocked("%s/lock.pid" % config.data_dir, "w") - lock.write("%s" % os.getpid()) - except BlockingIOError as err: - startupError("Can't open lock file, your ZeroNet client is probably already running, exiting... 
(%s)" % err) - if config.open_browser and config.open_browser != "False": - print("Opening browser: %s...", config.open_browser) - import webbrowser - try: - if config.open_browser == "default_browser": - browser = webbrowser.get() - else: - browser = webbrowser.get(config.open_browser) - browser.open("http://%s:%s/%s" % ( - config.ui_ip if config.ui_ip != "*" else "127.0.0.1", config.ui_port, config.homepage - ), new=2) - except Exception as err: - startupError("Error starting browser: %s" % err) - sys.exit() + if os.path.isfile("%s/debug.log" % config.log_dir): # Simple logrotate + if os.path.isfile("%s/debug-last.log" % config.log_dir): + os.unlink("%s/debug-last.log" % config.log_dir) + os.rename("%s/debug.log" % config.log_dir, "%s/debug-last.log" % config.log_dir) + logging.basicConfig(format='[%(asctime)s] %(levelname)-8s %(name)s %(message)s', + level=logging.DEBUG, filename="%s/debug.log" % config.log_dir) +else: + logging.basicConfig(level=logging.DEBUG, stream=open(os.devnull, "w")) # No file logging if action is not main + +# Console logger +console_log = logging.StreamHandler() +if config.action == "main": # Add time if main action + console_log.setFormatter(logging.Formatter('[%(asctime)s] %(name)s %(message)s', "%H:%M:%S")) +else: + console_log.setFormatter(logging.Formatter('%(name)s %(message)s', "%H:%M:%S")) + +logging.getLogger('').addHandler(console_log) # Add console logger +logging.getLogger('').name = "-" # Remove root prefix -config.initLogging() # Debug dependent configuration from Debug import DebugHook +if config.debug: + console_log.setLevel(logging.DEBUG) # Display everything to console +else: + console_log.setLevel(logging.INFO) # Display only important info to console # Load plugins from Plugin import PluginManager @@ -79,63 +74,30 @@ config.parse() # Parse again to add plugin configuration options # Log current config logging.debug("Config: %s" % config) -# Modify stack size on special hardwares -if config.stack_size: - import 
threading - threading.stack_size(config.stack_size) - # Use pure-python implementation of msgpack to save CPU if config.msgpack_purepython: os.environ["MSGPACK_PUREPYTHON"] = "True" -# Fix console encoding on Windows -if sys.platform.startswith("win"): - import subprocess - try: - chcp_res = subprocess.check_output("chcp 65001", shell=True).decode(errors="ignore").strip() - logging.debug("Changed console encoding to utf8: %s" % chcp_res) - except Exception as err: - logging.error("Error changing console encoding to utf8: %s" % err) - -# Socket monkey patch +# Socks Proxy monkey patch if config.proxy: from util import SocksProxy - import urllib.request + import urllib2 logging.info("Patching sockets to socks proxy: %s" % config.proxy) - if config.fileserver_ip == "*": - config.fileserver_ip = '127.0.0.1' # Do not accept connections anywhere but localhost - config.disable_udp = True # UDP not supported currently with proxy - SocksProxy.monkeyPatch(*config.proxy.split(":")) -elif config.tor == "always": - from util import SocksProxy - import urllib.request - logging.info("Patching sockets to tor socks proxy: %s" % config.tor_proxy) - if config.fileserver_ip == "*": - config.fileserver_ip = '127.0.0.1' # Do not accept connections anywhere but localhost - SocksProxy.monkeyPatch(*config.tor_proxy.split(":")) - config.disable_udp = True -elif config.bind: - bind = config.bind - if ":" not in config.bind: - bind += ":0" - from util import helper - helper.socketBindMonkeyPatch(*bind.split(":")) + config.fileserver_ip = '127.0.0.1' # Do not accept connections anywhere but localhost + SocksProxy.monkeyPath(*config.proxy.split(":")) + # -- Actions -- - @PluginManager.acceptPlugins class Actions(object): def call(self, function_name, kwargs): - logging.info("Version: %s r%s, Python %s, Gevent: %s" % (config.version, config.rev, sys.version, gevent.__version__)) - func = getattr(self, function_name, None) - back = func(**kwargs) - if back: - print(back) + func(**kwargs) # 
Default action: Start serving UiServer and FileServer def main(self): + logging.info("Version: %s r%s, Python %s, Gevent: %s" % (config.version, config.rev, sys.version, gevent.__version__)) global ui_server, file_server from File import FileServer from Ui import UiServer @@ -143,10 +105,6 @@ class Actions(object): file_server = FileServer() logging.info("Creating UiServer....") ui_server = UiServer() - file_server.ui_server = ui_server - - for startup_error in startup_errors: - logging.error("Startup error: %s" % startup_error) logging.info("Removing old SSL certs...") from Crypt import CryptConnection @@ -154,65 +112,45 @@ class Actions(object): logging.info("Starting servers....") gevent.joinall([gevent.spawn(ui_server.start), gevent.spawn(file_server.start)]) - logging.info("All server stopped") # Site commands - def siteCreate(self, use_master_seed=True): - logging.info("Generating new privatekey (use_master_seed: %s)..." % config.use_master_seed) + def siteCreate(self): + logging.info("Generating new privatekey...") from Crypt import CryptBitcoin - if use_master_seed: - from User import UserManager - user = UserManager.user_manager.get() - if not user: - user = UserManager.user_manager.create() - address, address_index, site_data = user.getNewSiteData() - privatekey = site_data["privatekey"] - logging.info("Generated using master seed from users.json, site index: %s" % address_index) - else: - privatekey = CryptBitcoin.newPrivatekey() - address = CryptBitcoin.privatekeyToAddress(privatekey) + privatekey = CryptBitcoin.newPrivatekey() logging.info("----------------------------------------------------------------------") logging.info("Site private key: %s" % privatekey) logging.info(" !!! 
^ Save it now, required to modify the site ^ !!!") + address = CryptBitcoin.privatekeyToAddress(privatekey) logging.info("Site address: %s" % address) logging.info("----------------------------------------------------------------------") - while True and not config.batch and not use_master_seed: - if input("? Have you secured your private key? (yes, no) > ").lower() == "yes": + while True and not config.batch: + if raw_input("? Have you secured your private key? (yes, no) > ").lower() == "yes": break else: logging.info("Please, secure it now, you going to need it to modify your site!") logging.info("Creating directory structure...") - from Site.Site import Site - from Site import SiteManager - SiteManager.site_manager.load() - + from Site import Site os.mkdir("%s/%s" % (config.data_dir, address)) open("%s/%s/index.html" % (config.data_dir, address), "w").write("Hello %s!" % address) logging.info("Creating content.json...") site = Site(address) - extend = {"postmessage_nonce_security": True} - if use_master_seed: - extend["address_index"] = address_index - - site.content_manager.sign(privatekey=privatekey, extend=extend) + site.content_manager.sign(privatekey=privatekey) site.settings["own"] = True site.saveSettings() logging.info("Site created!") - def siteSign(self, address, privatekey=None, inner_path="content.json", publish=False, remove_missing_optional=False): - from Site.Site import Site - from Site import SiteManager - from Debug import Debug - SiteManager.site_manager.load() + def siteSign(self, address, privatekey=None, inner_path="content.json", publish=False): + from Site import Site logging.info("Signing site: %s..." 
% address) site = Site(address, allow_create=False) - if not privatekey: # If no privatekey defined + if not privatekey: # If no privatekey definied from User import UserManager user = UserManager.user_manager.get() if user: @@ -224,216 +162,93 @@ class Actions(object): # Not found in users.json, ask from console import getpass privatekey = getpass.getpass("Private key (input hidden):") - try: - succ = site.content_manager.sign( - inner_path=inner_path, privatekey=privatekey, - update_changed_files=True, remove_missing_optional=remove_missing_optional - ) - except Exception as err: - logging.error("Sign error: %s" % Debug.formatException(err)) - succ = False + succ = site.content_manager.sign(inner_path=inner_path, privatekey=privatekey, update_changed_files=True) if succ and publish: self.sitePublish(address, inner_path=inner_path) def siteVerify(self, address): import time - from Site.Site import Site - from Site import SiteManager - SiteManager.site_manager.load() - + from Site import Site s = time.time() logging.info("Verifing site: %s..." % address) site = Site(address) bad_files = [] for content_inner_path in site.content_manager.contents: - s = time.time() logging.info("Verifing %s signature..." % content_inner_path) - err = None - try: - file_correct = site.content_manager.verifyFile( - content_inner_path, site.storage.open(content_inner_path, "rb"), ignore_same=False - ) - except Exception as exp: - file_correct = False - err = exp - + file_correct = site.content_manager.verifyFile( + content_inner_path, site.storage.open(content_inner_path, "rb"), ignore_same=False + ) if file_correct is True: - logging.info("[OK] %s (Done in %.3fs)" % (content_inner_path, time.time() - s)) + logging.info("[OK] %s signed by address %s!" % (content_inner_path, address)) else: - logging.error("[ERROR] %s: invalid file: %s!" % (content_inner_path, err)) - input("Continue?") + logging.error("[ERROR] %s: invalid file!" 
% content_inner_path) bad_files += content_inner_path logging.info("Verifying site files...") - bad_files += site.storage.verifyFiles()["bad_files"] + bad_files += site.storage.verifyFiles() if not bad_files: logging.info("[OK] All file sha512sum matches! (%.3fs)" % (time.time() - s)) else: logging.error("[ERROR] Error during verifying site files!") def dbRebuild(self, address): - from Site.Site import Site - from Site import SiteManager - SiteManager.site_manager.load() - + from Site import Site logging.info("Rebuilding site sql cache: %s..." % address) - site = SiteManager.site_manager.get(address) + site = Site(address) s = time.time() - try: - site.storage.rebuildDb() - logging.info("Done in %.3fs" % (time.time() - s)) - except Exception as err: - logging.error(err) + site.storage.rebuildDb() + logging.info("Done in %.3fs" % (time.time() - s)) def dbQuery(self, address, query): - from Site.Site import Site - from Site import SiteManager - SiteManager.site_manager.load() - + from Site import Site import json site = Site(address) result = [] for row in site.storage.query(query): result.append(dict(row)) - print(json.dumps(result, indent=4)) + print json.dumps(result, indent=4) def siteAnnounce(self, address): from Site.Site import Site - from Site import SiteManager - SiteManager.site_manager.load() - - logging.info("Opening a simple connection server") - global file_server - from File import FileServer - file_server = FileServer("127.0.0.1", 1234) - file_server.start() - logging.info("Announcing site %s to tracker..." 
% address) site = Site(address) s = time.time() site.announce() - print("Response time: %.3fs" % (time.time() - s)) - print(site.peers) - - def siteDownload(self, address): - from Site.Site import Site - from Site import SiteManager - SiteManager.site_manager.load() - - logging.info("Opening a simple connection server") - global file_server - from File import FileServer - file_server = FileServer("127.0.0.1", 1234) - file_server_thread = gevent.spawn(file_server.start, check_sites=False) - - site = Site(address) - - on_completed = gevent.event.AsyncResult() - - def onComplete(evt): - evt.set(True) - - site.onComplete.once(lambda: onComplete(on_completed)) - print("Announcing...") - site.announce() - - s = time.time() - print("Downloading...") - site.downloadContent("content.json", check_modifications=True) - - print("Downloaded in %.3fs" % (time.time()-s)) + print "Response time: %.3fs" % (time.time() - s) + print site.peers def siteNeedFile(self, address, inner_path): - from Site.Site import Site - from Site import SiteManager - SiteManager.site_manager.load() - - def checker(): - while 1: - s = time.time() - time.sleep(1) - print("Switch time:", time.time() - s) - gevent.spawn(checker) - - logging.info("Opening a simple connection server") - global file_server - from File import FileServer - file_server = FileServer("127.0.0.1", 1234) - file_server_thread = gevent.spawn(file_server.start, check_sites=False) - + from Site import Site site = Site(address) site.announce() - print(site.needFile(inner_path, update=True)) - - def siteCmd(self, address, cmd, parameters): - import json - from Site import SiteManager - - site = SiteManager.site_manager.get(address) - - if not site: - logging.error("Site not found: %s" % address) - return None - - ws = self.getWebsocket(site) - - ws.send(json.dumps({"cmd": cmd, "params": parameters, "id": 1})) - res_raw = ws.recv() - - try: - res = json.loads(res_raw) - except Exception as err: - return {"error": "Invalid result: %s" % 
err, "res_raw": res_raw} - - if "result" in res: - return res["result"] - else: - return res - - def getWebsocket(self, site): - import websocket - - ws_address = "ws://%s:%s/Websocket?wrapper_key=%s" % (config.ui_ip, config.ui_port, site.settings["wrapper_key"]) - logging.info("Connecting to %s" % ws_address) - ws = websocket.create_connection(ws_address) - return ws + print site.needFile(inner_path, update=True) def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json"): global file_server - from Site.Site import Site from Site import SiteManager from File import FileServer # We need fileserver to handle incoming file requests from Peer import Peer - file_server = FileServer() - site = SiteManager.site_manager.get(address) + logging.info("Loading site...") + site = SiteManager.site_manager.list()[address] site.settings["serving"] = True # Serving the site even if its disabled - try: - ws = self.getWebsocket(site) - logging.info("Sending siteReload") - self.siteCmd(address, "siteReload", inner_path) - - logging.info("Sending sitePublish") - self.siteCmd(address, "sitePublish", {"inner_path": inner_path, "sign": False}) - logging.info("Done.") - - except Exception as err: - logging.info("Can't connect to local websocket client: %s" % err) - logging.info("Creating FileServer....") - file_server_thread = gevent.spawn(file_server.start, check_sites=False) # Dont check every site integrity - time.sleep(0.001) + logging.info("Creating FileServer....") + file_server = FileServer() + file_server_thread = gevent.spawn(file_server.start, check_sites=False) # Dont check every site integrity + time.sleep(0) + if not file_server_thread.ready(): # Started fileserver - file_server.portCheck() + file_server.openport() if peer_ip: # Announce ip specificed site.addPeer(peer_ip, peer_port) else: # Just ask the tracker logging.info("Gathering peers from tracker") site.announce() # Gather peers - published = site.publish(10, inner_path) # Push to peers + 
published = site.publish(20, inner_path) # Push to 20 peers if published > 0: time.sleep(3) logging.info("Serving files (max 60s)...") @@ -441,6 +256,15 @@ class Actions(object): logging.info("Done.") else: logging.info("No peers found, sitePublish command only works if you already have visitors serving your site") + else: + # Notify local client on new content + logging.info("Sending siteReload") + my_peer = Peer("127.0.0.1", config.fileserver_port) + logging.info(my_peer.request("siteReload", {"site": site.address, "inner_path": inner_path})) + logging.info("Sending sitePublish") + logging.info(my_peer.request("sitePublish", {"site": site.address, "inner_path": inner_path})) + logging.info("Done.") + # Crypto commands def cryptPrivatekeyToAddress(self, privatekey=None): @@ -449,95 +273,52 @@ class Actions(object): import getpass privatekey = getpass.getpass("Private key (input hidden):") - print(CryptBitcoin.privatekeyToAddress(privatekey)) + print CryptBitcoin.privatekeyToAddress(privatekey) def cryptSign(self, message, privatekey): from Crypt import CryptBitcoin - print(CryptBitcoin.sign(message, privatekey)) - - def cryptVerify(self, message, sign, address): - from Crypt import CryptBitcoin - print(CryptBitcoin.verify(message, address, sign)) - - def cryptGetPrivatekey(self, master_seed, site_address_index=None): - from Crypt import CryptBitcoin - if len(master_seed) != 64: - logging.error("Error: Invalid master seed length: %s (required: 64)" % len(master_seed)) - return False - privatekey = CryptBitcoin.hdPrivatekey(master_seed, site_address_index) - print("Requested private key: %s" % privatekey) + print CryptBitcoin.sign(message, privatekey) # Peer def peerPing(self, peer_ip, peer_port=None): if not peer_port: - peer_port = 15441 + peer_port = config.fileserver_port logging.info("Opening a simple connection server") global file_server from Connection import ConnectionServer file_server = ConnectionServer("127.0.0.1", 1234) - 
file_server.start(check_connections=False) - from Crypt import CryptConnection - CryptConnection.manager.loadCerts() from Peer import Peer logging.info("Pinging 5 times peer: %s:%s..." % (peer_ip, int(peer_port))) - s = time.time() - peer = Peer(peer_ip, peer_port) - peer.connect() - - if not peer.connection: - print("Error: Can't connect to peer (connection error: %s)" % peer.connection_error) - return False - if "shared_ciphers" in dir(peer.connection.sock): - print("Shared ciphers:", peer.connection.sock.shared_ciphers()) - if "cipher" in dir(peer.connection.sock): - print("Cipher:", peer.connection.sock.cipher()[0]) - if "version" in dir(peer.connection.sock): - print("TLS version:", peer.connection.sock.version()) - print("Connection time: %.3fs (connection error: %s)" % (time.time() - s, peer.connection_error)) - - for i in range(5): - ping_delay = peer.ping() - print("Response time: %.3fs" % ping_delay) - time.sleep(1) - peer.remove() - print("Reconnect test...") peer = Peer(peer_ip, peer_port) for i in range(5): - ping_delay = peer.ping() - print("Response time: %.3fs" % ping_delay) + s = time.time() + print peer.ping(), + print "Response time: %.3fs (crypt: %s)" % (time.time() - s, peer.connection.crypt) time.sleep(1) def peerGetFile(self, peer_ip, peer_port, site, filename, benchmark=False): logging.info("Opening a simple connection server") global file_server from Connection import ConnectionServer - file_server = ConnectionServer("127.0.0.1", 1234) - file_server.start(check_connections=False) - from Crypt import CryptConnection - CryptConnection.manager.loadCerts() + file_server = ConnectionServer() from Peer import Peer logging.info("Getting %s/%s from peer: %s:%s..." 
% (site, filename, peer_ip, peer_port)) peer = Peer(peer_ip, peer_port) s = time.time() + peer.getFile(site, filename) if benchmark: for i in range(10): - peer.getFile(site, filename), - print("Response time: %.3fs" % (time.time() - s)) - input("Check memory") - else: - print(peer.getFile(site, filename).read()) + print peer.getFile(site, filename), + print "Response time: %.3fs" % (time.time() - s) + raw_input("Check memory") def peerCmd(self, peer_ip, peer_port, cmd, parameters): logging.info("Opening a simple connection server") global file_server from Connection import ConnectionServer file_server = ConnectionServer() - file_server.start(check_connections=False) - from Crypt import CryptConnection - CryptConnection.manager.loadCerts() - from Peer import Peer peer = Peer(peer_ip, peer_port) @@ -546,51 +327,7 @@ class Actions(object): parameters = json.loads(parameters.replace("'", '"')) else: parameters = {} - try: - res = peer.request(cmd, parameters) - print(json.dumps(res, indent=2, ensure_ascii=False)) - except Exception as err: - print("Unknown response (%s): %s" % (err, res)) - - def getConfig(self): - import json - print(json.dumps(config.getServerInfo(), indent=2, ensure_ascii=False)) - - def test(self, test_name, *args, **kwargs): - import types - def funcToName(func_name): - test_name = func_name.replace("test", "") - return test_name[0].lower() + test_name[1:] - - test_names = [funcToName(name) for name in dir(self) if name.startswith("test") and name != "test"] - if not test_name: - # No test specificed, list tests - print("\nNo test specified, possible tests:") - for test_name in test_names: - func_name = "test" + test_name[0].upper() + test_name[1:] - func = getattr(self, func_name) - if func.__doc__: - print("- %s: %s" % (test_name, func.__doc__.strip())) - else: - print("- %s" % test_name) - return None - - # Run tests - func_name = "test" + test_name[0].upper() + test_name[1:] - if hasattr(self, func_name): - func = getattr(self, func_name) - 
print("- Running test: %s" % test_name, end="") - s = time.time() - ret = func(*args, **kwargs) - if type(ret) is types.GeneratorType: - for progress in ret: - print(progress, end="") - sys.stdout.flush() - print("\n* Test %s done in %.3fs" % (test_name, time.time() - s)) - else: - print("Unknown test: %r (choose from: %s)" % ( - test_name, test_names - )) + logging.info("Response: %s" % peer.request(cmd, parameters)) actions = Actions() diff --git a/src/util/Cached.py b/src/util/Cached.py deleted file mode 100644 index 72d60dbc..00000000 --- a/src/util/Cached.py +++ /dev/null @@ -1,68 +0,0 @@ -import time - - -class Cached(object): - def __init__(self, timeout): - self.cache_db = {} - self.timeout = timeout - - def __call__(self, func): - def wrapper(*args, **kwargs): - key = "%s %s" % (args, kwargs) - cached_value = None - cache_hit = False - if key in self.cache_db: - cache_hit = True - cached_value, time_cached_end = self.cache_db[key] - if time.time() > time_cached_end: - self.cleanupExpired() - cached_value = None - cache_hit = False - - if cache_hit: - return cached_value - else: - cached_value = func(*args, **kwargs) - time_cached_end = time.time() + self.timeout - self.cache_db[key] = (cached_value, time_cached_end) - return cached_value - - wrapper.emptyCache = self.emptyCache - - return wrapper - - def cleanupExpired(self): - for key in list(self.cache_db.keys()): - cached_value, time_cached_end = self.cache_db[key] - if time.time() > time_cached_end: - del(self.cache_db[key]) - - def emptyCache(self): - num = len(self.cache_db) - self.cache_db.clear() - return num - - -if __name__ == "__main__": - from gevent import monkey - monkey.patch_all() - - @Cached(timeout=2) - def calcAdd(a, b): - print("CalcAdd", a, b) - return a + b - - @Cached(timeout=1) - def calcMultiply(a, b): - print("calcMultiply", a, b) - return a * b - - for i in range(5): - print("---") - print("Emptied", calcAdd.emptyCache()) - assert calcAdd(1, 2) == 3 - print("Emptied", 
calcAdd.emptyCache()) - assert calcAdd(1, 2) == 3 - assert calcAdd(2, 3) == 5 - assert calcMultiply(2, 3) == 6 - time.sleep(1) diff --git a/src/util/Diff.py b/src/util/Diff.py deleted file mode 100644 index 53b82c5a..00000000 --- a/src/util/Diff.py +++ /dev/null @@ -1,50 +0,0 @@ -import io - -import difflib - - -def sumLen(lines): - return sum(map(len, lines)) - - -def diff(old, new, limit=False): - matcher = difflib.SequenceMatcher(None, old, new) - actions = [] - size = 0 - for tag, old_from, old_to, new_from, new_to in matcher.get_opcodes(): - if tag == "insert": - new_line = new[new_from:new_to] - actions.append(("+", new_line)) - size += sum(map(len, new_line)) - elif tag == "equal": - actions.append(("=", sumLen(old[old_from:old_to]))) - elif tag == "delete": - actions.append(("-", sumLen(old[old_from:old_to]))) - elif tag == "replace": - actions.append(("-", sumLen(old[old_from:old_to]))) - new_lines = new[new_from:new_to] - actions.append(("+", new_lines)) - size += sumLen(new_lines) - if limit and size > limit: - return False - return actions - - -def patch(old_f, actions): - new_f = io.BytesIO() - for action, param in actions: - if type(action) is bytes: - action = action.decode() - if action == "=": # Same lines - new_f.write(old_f.read(param)) - elif action == "-": # Delete lines - old_f.seek(param, 1) # Seek from current position - continue - elif action == "+": # Add lines - for add_line in param: - if type(add_line) is str: - add_line = add_line.encode() - new_f.write(add_line) - else: - raise "Unknown action: %s" % action - return new_f diff --git a/src/util/Electrum.py b/src/util/Electrum.py deleted file mode 100644 index 112151aa..00000000 --- a/src/util/Electrum.py +++ /dev/null @@ -1,39 +0,0 @@ -import hashlib -import struct - - -# Electrum, the heck?! 
- -def bchr(i): - return struct.pack("B", i) - -def encode(val, base, minlen=0): - base, minlen = int(base), int(minlen) - code_string = b"".join([bchr(x) for x in range(256)]) - result = b"" - while val > 0: - index = val % base - result = code_string[index:index + 1] + result - val //= base - return code_string[0:1] * max(minlen - len(result), 0) + result - -def insane_int(x): - x = int(x) - if x < 253: - return bchr(x) - elif x < 65536: - return bchr(253) + encode(x, 256, 2)[::-1] - elif x < 4294967296: - return bchr(254) + encode(x, 256, 4)[::-1] - else: - return bchr(255) + encode(x, 256, 8)[::-1] - - -def magic(message): - return b"\x18Bitcoin Signed Message:\n" + insane_int(len(message)) + message - -def format(message): - return hashlib.sha256(magic(message)).digest() - -def dbl_format(message): - return hashlib.sha256(format(message)).digest() diff --git a/src/util/Event.py b/src/util/Event.py index 9d642736..b9614795 100644 --- a/src/util/Event.py +++ b/src/util/Event.py @@ -28,19 +28,19 @@ class Event(list): if __name__ == "__main__": def testBenchmark(): def say(pre, text): - print("%s Say: %s" % (pre, text)) + print "%s Say: %s" % (pre, text) import time s = time.time() on_changed = Event() for i in range(1000): on_changed.once(lambda pre: say(pre, "once"), "once") - print("Created 1000 once in %.3fs" % (time.time() - s)) + print "Created 1000 once in %.3fs" % (time.time() - s) on_changed("#1") def testUsage(): def say(pre, text): - print("%s Say: %s" % (pre, text)) + print "%s Say: %s" % (pre, text) on_changed = Event() on_changed.once(lambda pre: say(pre, "once")) diff --git a/src/util/Flag.py b/src/util/Flag.py deleted file mode 100644 index 37cfdfba..00000000 --- a/src/util/Flag.py +++ /dev/null @@ -1,22 +0,0 @@ -from collections import defaultdict - - -class Flag(object): - def __init__(self): - self.valid_flags = set([ - "admin", # Only allowed to run sites with ADMIN permission - "async_run", # Action will be ran async with gevent.spawn - 
"no_multiuser" # Action disabled if Multiuser plugin running in open proxy mode - ]) - self.db = defaultdict(set) - - def __getattr__(self, key): - def func(f): - if key not in self.valid_flags: - raise Exception("Invalid flag: %s (valid: %s)" % (key, self.valid_flags)) - self.db[f.__name__].add(key) - return f - return func - - -flag = Flag() diff --git a/src/util/GreenletManager.py b/src/util/GreenletManager.py deleted file mode 100644 index e024233d..00000000 --- a/src/util/GreenletManager.py +++ /dev/null @@ -1,24 +0,0 @@ -import gevent -from Debug import Debug - - -class GreenletManager: - def __init__(self): - self.greenlets = set() - - def spawnLater(self, *args, **kwargs): - greenlet = gevent.spawn_later(*args, **kwargs) - greenlet.link(lambda greenlet: self.greenlets.remove(greenlet)) - self.greenlets.add(greenlet) - return greenlet - - def spawn(self, *args, **kwargs): - greenlet = gevent.spawn(*args, **kwargs) - greenlet.link(lambda greenlet: self.greenlets.remove(greenlet)) - self.greenlets.add(greenlet) - return greenlet - - def stopGreenlets(self, reason="Stopping all greenlets"): - num = len(self.greenlets) - gevent.killall(list(self.greenlets), Debug.createNotifyType(reason), block=False) - return num diff --git a/src/util/Http.py b/src/util/Http.py new file mode 100644 index 00000000..05f6446e --- /dev/null +++ b/src/util/Http.py @@ -0,0 +1,12 @@ +import urllib2 +import logging + +from Config import config + + +def get(url, accept="application/json"): + logging.debug("Get %s" % url) + req = urllib2.Request(url) + req.add_header('User-Agent', "ZeroNet %s (https://github.com/HelloZeroNet/ZeroNet)" % config.version) + req.add_header('Accept', accept) + return urllib2.urlopen(req) diff --git a/src/util/Msgpack.py b/src/util/Msgpack.py deleted file mode 100644 index 1033f92e..00000000 --- a/src/util/Msgpack.py +++ /dev/null @@ -1,101 +0,0 @@ -import os -import struct -import io - -import msgpack -import msgpack.fallback - - -def msgpackHeader(size): - 
if size <= 2 ** 8 - 1: - return b"\xc4" + struct.pack("B", size) - elif size <= 2 ** 16 - 1: - return b"\xc5" + struct.pack(">H", size) - elif size <= 2 ** 32 - 1: - return b"\xc6" + struct.pack(">I", size) - else: - raise Exception("huge binary string") - - -def stream(data, writer): - packer = msgpack.Packer(use_bin_type=True) - writer(packer.pack_map_header(len(data))) - for key, val in data.items(): - writer(packer.pack(key)) - if isinstance(val, io.IOBase): # File obj - max_size = os.fstat(val.fileno()).st_size - val.tell() - size = min(max_size, val.read_bytes) - bytes_left = size - writer(msgpackHeader(size)) - buff = 1024 * 64 - while 1: - writer(val.read(min(bytes_left, buff))) - bytes_left = bytes_left - buff - if bytes_left <= 0: - break - else: # Simple - writer(packer.pack(val)) - return size - - -class FilePart(object): - __slots__ = ("file", "read_bytes", "__class__") - - def __init__(self, *args, **kwargs): - self.file = open(*args, **kwargs) - self.__enter__ == self.file.__enter__ - - def __getattr__(self, attr): - return getattr(self.file, attr) - - def __enter__(self, *args, **kwargs): - return self.file.__enter__(*args, **kwargs) - - def __exit__(self, *args, **kwargs): - return self.file.__exit__(*args, **kwargs) - - -# Don't try to decode the value of these fields as utf8 -bin_value_keys = ("hashfield_raw", "peers", "peers_ipv6", "peers_onion", "body", "sites", "bin") - - -def objectDecoderHook(obj): - global bin_value_keys - back = {} - for key, val in obj: - if type(key) is bytes: - key = key.decode("utf8") - if key in bin_value_keys or type(val) is not bytes or len(key) >= 64: - back[key] = val - else: - back[key] = val.decode("utf8") - return back - - -def getUnpacker(fallback=False, decode=True): - if fallback: # Pure Python - unpacker = msgpack.fallback.Unpacker - else: - unpacker = msgpack.Unpacker - - extra_kwargs = {"max_buffer_size": 5 * 1024 * 1024} - if msgpack.version[0] >= 1: - extra_kwargs["strict_map_key"] = False - - if 
decode: # Workaround for backward compatibility: Try to decode bin to str - unpacker = unpacker(raw=True, object_pairs_hook=objectDecoderHook, **extra_kwargs) - else: - unpacker = unpacker(raw=False, **extra_kwargs) - - return unpacker - - -def pack(data, use_bin_type=True): - return msgpack.packb(data, use_bin_type=use_bin_type) - - -def unpack(data, decode=True): - unpacker = getUnpacker(decode=decode) - unpacker.feed(data) - return next(unpacker) - diff --git a/src/util/Noparallel.py b/src/util/Noparallel.py index 4a4a854d..49adddbb 100644 --- a/src/util/Noparallel.py +++ b/src/util/Noparallel.py @@ -1,45 +1,21 @@ import gevent import time -from gevent.event import AsyncResult - -from . import ThreadPool -class Noparallel: # Only allow function running once in same time +class Noparallel(object): # Only allow function running once in same time - def __init__(self, blocking=True, ignore_args=False, ignore_class=False, queue=False): + def __init__(self, blocking=True): self.threads = {} self.blocking = blocking # Blocking: Acts like normal function else thread returned - self.queue = queue # Execute again when blocking is done - self.queued = False - self.ignore_args = ignore_args # Block does not depend on function call arguments - self.ignore_class = ignore_class # Block does not depeds on class instance def __call__(self, func): def wrapper(*args, **kwargs): - if not ThreadPool.isMainThread(): - return ThreadPool.main_loop.call(wrapper, *args, **kwargs) - - if self.ignore_class: - key = func # Unique key only by function and class object - elif self.ignore_args: - key = (func, args[0]) # Unique key only by function and class object - else: - key = (func, tuple(args), str(kwargs)) # Unique key for function including parameters + key = (func, tuple(args), tuple(kwargs.items())) # Unique key for function including parameters if key in self.threads: # Thread already running (if using blocking mode) - if self.queue: - self.queued = True thread = self.threads[key] if 
self.blocking: - if self.queued: - res = thread.get() # Blocking until its finished - if key in self.threads: - return self.threads[key].get() # Queue finished since started running - self.queued = False - return wrapper(*args, **kwargs) # Run again after the end - else: - return thread.get() # Return the value - + thread.join() # Blocking until its finished + return thread.value # Return the value else: # No blocking if thread.ready(): # Its finished, create a new thread = gevent.spawn(func, *args, **kwargs) @@ -48,24 +24,16 @@ class Noparallel: # Only allow function running once in same time else: # Still running return thread else: # Thread not running + thread = gevent.spawn(func, *args, **kwargs) # Spawning new thread + thread.link(lambda thread: self.cleanup(key, thread)) + self.threads[key] = thread if self.blocking: # Wait for finish - asyncres = AsyncResult() - self.threads[key] = asyncres - try: - res = func(*args, **kwargs) - asyncres.set(res) - self.cleanup(key, asyncres) - return res - except Exception as err: - asyncres.set_exception(err) - self.cleanup(key, asyncres) - raise(err) + thread.join() + ret = thread.value + return ret else: # No blocking just return the thread - thread = gevent.spawn(func, *args, **kwargs) # Spawning new thread - thread.link(lambda thread: self.cleanup(key, thread)) - self.threads[key] = thread return thread - wrapper.__name__ = func.__name__ + wrapper.func_name = func.func_name return wrapper @@ -76,14 +44,12 @@ class Noparallel: # Only allow function running once in same time if __name__ == "__main__": - - class Test(): @Noparallel() def count(self, num=5): for i in range(num): - print(self, i) + print self, i time.sleep(1) return "%s return:%s" % (self, i) @@ -92,59 +58,59 @@ if __name__ == "__main__": @Noparallel(blocking=False) def count(self, num=5): for i in range(num): - print(self, i) + print self, i time.sleep(1) return "%s return:%s" % (self, i) def testBlocking(): test = Test() test2 = Test() - 
print("Counting...") - print("Creating class1/thread1") + print "Counting..." + print "Creating class1/thread1" thread1 = gevent.spawn(test.count) - print("Creating class1/thread2 (ignored)") + print "Creating class1/thread2 (ignored)" thread2 = gevent.spawn(test.count) - print("Creating class2/thread3") + print "Creating class2/thread3" thread3 = gevent.spawn(test2.count) - print("Joining class1/thread1") + print "Joining class1/thread1" thread1.join() - print("Joining class1/thread2") + print "Joining class1/thread2" thread2.join() - print("Joining class2/thread3") + print "Joining class2/thread3" thread3.join() - print("Creating class1/thread4 (its finished, allowed again)") + print "Creating class1/thread4 (its finished, allowed again)" thread4 = gevent.spawn(test.count) - print("Joining thread4") + print "Joining thread4" thread4.join() - print(thread1.value, thread2.value, thread3.value, thread4.value) - print("Done.") + print thread1.value, thread2.value, thread3.value, thread4.value + print "Done." 
def testNoblocking(): test = TestNoblock() test2 = TestNoblock() - print("Creating class1/thread1") + print "Creating class1/thread1" thread1 = test.count() - print("Creating class1/thread2 (ignored)") + print "Creating class1/thread2 (ignored)" thread2 = test.count() - print("Creating class2/thread3") + print "Creating class2/thread3" thread3 = test2.count() - print("Joining class1/thread1") + print "Joining class1/thread1" thread1.join() - print("Joining class1/thread2") + print "Joining class1/thread2" thread2.join() - print("Joining class2/thread3") + print "Joining class2/thread3" thread3.join() - print("Creating class1/thread4 (its finished, allowed again)") + print "Creating class1/thread4 (its finished, allowed again)" thread4 = test.count() - print("Joining thread4") + print "Joining thread4" thread4.join() - print(thread1.value, thread2.value, thread3.value, thread4.value) - print("Done.") + print thread1.value, thread2.value, thread3.value, thread4.value + print "Done." def testBenchmark(): import time @@ -153,50 +119,21 @@ if __name__ == "__main__": import gc from greenlet import greenlet objs = [obj for obj in gc.get_objects() if isinstance(obj, greenlet)] - print("Greenlets: %s" % len(objs)) + print "Greenlets: %s" % len(objs) printThreadNum() test = TestNoblock() s = time.time() for i in range(3): gevent.spawn(test.count, i + 1) - print("Created in %.3fs" % (time.time() - s)) + print "Created in %.3fs" % (time.time() - s) printThreadNum() time.sleep(5) - - def testException(): - import time - @Noparallel(blocking=True, queue=True) - def count(self, num=5): - s = time.time() - # raise Exception("err") - for i in range(num): - print(self, i) - time.sleep(1) - return "%s return:%s" % (s, i) - def caller(): - try: - print("Ret:", count(5)) - except Exception as err: - print("Raised:", repr(err)) - - gevent.joinall([ - gevent.spawn(caller), - gevent.spawn(caller), - gevent.spawn(caller), - gevent.spawn(caller) - ]) - - from gevent import monkey 
monkey.patch_all() - testException() - - """ testBenchmark() - print("Testing blocking mode...") + print "Testing blocking mode..." testBlocking() - print("Testing noblocking mode...") + print "Testing noblocking mode..." testNoblocking() - """ diff --git a/src/util/OpensslFindPatch.py b/src/util/OpensslFindPatch.py deleted file mode 100644 index 0f5d2dc6..00000000 --- a/src/util/OpensslFindPatch.py +++ /dev/null @@ -1,69 +0,0 @@ -import logging -import os -import sys -import ctypes.util - -from Config import config - -find_library_original = ctypes.util.find_library - - -def getOpensslPath(): - if config.openssl_lib_file: - return config.openssl_lib_file - - if sys.platform.startswith("win"): - lib_paths = [ - os.path.join(os.getcwd(), "tools/openssl/libeay32.dll"), # ZeroBundle Windows - os.path.join(os.path.dirname(sys.executable), "DLLs/libcrypto-1_1-x64.dll"), - os.path.join(os.path.dirname(sys.executable), "DLLs/libcrypto-1_1.dll") - ] - elif sys.platform == "cygwin": - lib_paths = ["/bin/cygcrypto-1.0.0.dll"] - else: - lib_paths = [ - "../runtime/lib/libcrypto.so.1.1", # ZeroBundle Linux - "../../Frameworks/libcrypto.1.1.dylib", # ZeroBundle macOS - "/opt/lib/libcrypto.so.1.0.0", # For optware and entware - "/usr/local/ssl/lib/libcrypto.so" - ] - - for lib_path in lib_paths: - if os.path.isfile(lib_path): - return lib_path - - if "ANDROID_APP_PATH" in os.environ: - try: - lib_dir = os.environ["ANDROID_APP_PATH"] + "/../../lib" - return [lib for lib in os.listdir(lib_dir) if "crypto" in lib][0] - except Exception as err: - logging.debug("OpenSSL lib not found in: %s (%s)" % (lib_dir, err)) - - if "LD_LIBRARY_PATH" in os.environ: - lib_dir_paths = os.environ["LD_LIBRARY_PATH"].split(":") - for path in lib_dir_paths: - try: - return [lib for lib in os.listdir(path) if "libcrypto.so" in lib][0] - except Exception as err: - logging.debug("OpenSSL lib not found in: %s (%s)" % (path, err)) - - lib_path = ( - find_library_original('ssl.so') or 
find_library_original('ssl') or - find_library_original('crypto') or find_library_original('libcrypto') or 'libeay32' - ) - - return lib_path - - -def patchCtypesOpensslFindLibrary(): - def findLibraryPatched(name): - if name in ("ssl", "crypto", "libeay32"): - lib_path = getOpensslPath() - return lib_path - else: - return find_library_original(name) - - ctypes.util.find_library = findLibraryPatched - - -patchCtypesOpensslFindLibrary() diff --git a/src/util/Platform.py b/src/util/Platform.py deleted file mode 100644 index 5bdde2f8..00000000 --- a/src/util/Platform.py +++ /dev/null @@ -1,36 +0,0 @@ -import sys -import logging - - -def setMaxfilesopened(limit): - try: - if sys.platform == "win32": - import ctypes - dll = None - last_err = None - for dll_name in ["msvcr100", "msvcr110", "msvcr120"]: - try: - dll = getattr(ctypes.cdll, dll_name) - break - except OSError as err: - last_err = err - - if not dll: - raise last_err - - maxstdio = dll._getmaxstdio() - if maxstdio < limit: - logging.debug("%s: Current maxstdio: %s, changing to %s..." % (dll, maxstdio, limit)) - dll._setmaxstdio(limit) - return True - else: - import resource - soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE) - if soft < limit: - logging.debug("Current RLIMIT_NOFILE: %s (max: %s), changing to %s..." 
% (soft, hard, limit)) - resource.setrlimit(resource.RLIMIT_NOFILE, (limit, hard)) - return True - - except Exception as err: - logging.error("Failed to modify max files open limit: %s" % err) - return False diff --git a/src/util/Pooled.py b/src/util/Pooled.py deleted file mode 100644 index 9a4a7b63..00000000 --- a/src/util/Pooled.py +++ /dev/null @@ -1,65 +0,0 @@ -import gevent.pool - - -class Pooled(object): - def __init__(self, size=100): - self.pool = gevent.pool.Pool(size) - self.pooler_running = False - self.queue = [] - self.func = None - - def waiter(self, evt, args, kwargs): - res = self.func(*args, **kwargs) - if type(res) == gevent.event.AsyncResult: - evt.set(res.get()) - else: - evt.set(res) - - def pooler(self): - while self.queue: - evt, args, kwargs = self.queue.pop(0) - self.pool.spawn(self.waiter, evt, args, kwargs) - self.pooler_running = False - - def __call__(self, func): - def wrapper(*args, **kwargs): - evt = gevent.event.AsyncResult() - self.queue.append((evt, args, kwargs)) - if not self.pooler_running: - self.pooler_running = True - gevent.spawn(self.pooler) - return evt - wrapper.__name__ = func.__name__ - self.func = func - - return wrapper - -if __name__ == "__main__": - import gevent - import gevent.pool - import gevent.queue - import gevent.event - import gevent.monkey - import time - - gevent.monkey.patch_all() - - def addTask(inner_path): - evt = gevent.event.AsyncResult() - gevent.spawn_later(1, lambda: evt.set(True)) - return evt - - def needFile(inner_path): - return addTask(inner_path) - - @Pooled(10) - def pooledNeedFile(inner_path): - return needFile(inner_path) - - threads = [] - for i in range(100): - threads.append(pooledNeedFile(i)) - - s = time.time() - gevent.joinall(threads) # Should take 10 second - print(time.time() - s) diff --git a/src/util/QueryJson.py b/src/util/QueryJson.py index d9921ff0..0eb56633 100644 --- a/src/util/QueryJson.py +++ b/src/util/QueryJson.py @@ -13,15 +13,12 @@ def queryFile(file_path, 
filter_path, filter_key=None, filter_val=None): if not data: return - if type(data) == list: - for row in data: - if filter_val: # Filter by value - if row[filter_key] == filter_val: - back.append(row) - else: + for row in data: + if filter_val: # Filter by value + if row[filter_key] == filter_val: back.append(row) - else: - back.append({"value": data}) + else: + back.append(row) return back @@ -64,4 +61,4 @@ def query(path_pattern, filter): if __name__ == "__main__": for row in list(query("../../data/12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH/data/users/*/data.json", "")): - print(row) + print row diff --git a/src/util/RateLimit.py b/src/util/RateLimit.py index 465859c2..7beb94c5 100644 --- a/src/util/RateLimit.py +++ b/src/util/RateLimit.py @@ -27,17 +27,11 @@ def isAllowed(event, allowed_again=10): else: return False -def delayLeft(event, allowed_again=10): - last_called = called_db.get(event) - if not last_called: # Its not called before - return 0 - else: - return allowed_again - (time.time() - last_called) def callQueue(event): func, args, kwargs, thread = queue_db[event] log.debug("Calling: %s" % event) - called(event) + del called_db[event] del queue_db[event] return func(*args, **kwargs) @@ -78,7 +72,8 @@ def call(event, allowed_again=10, func=None, *args, **kwargs): called(event, time_left) time.sleep(time_left) back = func(*args, **kwargs) - called(event) + if event in called_db: + del called_db[event] return back @@ -86,7 +81,7 @@ def call(event, allowed_again=10, func=None, *args, **kwargs): def rateLimitCleanup(): while 1: expired = time.time() - 60 * 2 # Cleanup if older than 2 minutes - for event in list(called_db.keys()): + for event in called_db.keys(): if called_db[event] < expired: del called_db[event] time.sleep(60 * 3) # Every 3 minutes @@ -99,30 +94,30 @@ if __name__ == "__main__": import random def publish(inner_path): - print("Publishing %s..." % inner_path) + print "Publishing %s..." 
% inner_path return 1 def cb(thread): - print("Value:", thread.value) + print "Value:", thread.value - print("Testing async spam requests rate limit to 1/sec...") + print "Testing async spam requests rate limit to 1/sec..." for i in range(3000): thread = callAsync("publish content.json", 1, publish, "content.json %s" % i) time.sleep(float(random.randint(1, 20)) / 100000) - print(thread.link(cb)) - print("Done") + print thread.link(cb) + print "Done" time.sleep(2) - print("Testing sync spam requests rate limit to 1/sec...") + print "Testing sync spam requests rate limit to 1/sec..." for i in range(5): call("publish data.json", 1, publish, "data.json %s" % i) time.sleep(float(random.randint(1, 100)) / 100) - print("Done") + print "Done" - print("Testing cleanup") + print "Testing cleanup" thread = callAsync("publish content.json single", 1, publish, "content.json single") - print("Needs to cleanup:", called_db, queue_db) - print("Waiting 3min for cleanup process...") + print "Needs to cleanup:", called_db, queue_db + print "Waiting 3min for cleanup process..." time.sleep(60 * 3) - print("Cleaned up:", called_db, queue_db) + print "Cleaned up:", called_db, queue_db diff --git a/src/util/SafeRe.py b/src/util/SafeRe.py deleted file mode 100644 index 6018e2d3..00000000 --- a/src/util/SafeRe.py +++ /dev/null @@ -1,32 +0,0 @@ -import re - - -class UnsafePatternError(Exception): - pass - -cached_patterns = {} - - -def isSafePattern(pattern): - if len(pattern) > 255: - raise UnsafePatternError("Pattern too long: %s characters in %s" % (len(pattern), pattern)) - - unsafe_pattern_match = re.search(r"[^\.][\*\{\+]", pattern) # Always should be "." 
before "*{+" characters to avoid ReDoS - if unsafe_pattern_match: - raise UnsafePatternError("Potentially unsafe part of the pattern: %s in %s" % (unsafe_pattern_match.group(0), pattern)) - - repetitions = re.findall(r"\.[\*\{\+]", pattern) - if len(repetitions) >= 10: - raise UnsafePatternError("More than 10 repetitions of %s in %s" % (repetitions[0], pattern)) - - return True - - -def match(pattern, *args, **kwargs): - cached_pattern = cached_patterns.get(pattern) - if cached_pattern: - return cached_pattern.match(*args, **kwargs) - else: - if isSafePattern(pattern): - cached_patterns[pattern] = re.compile(pattern) - return cached_patterns[pattern].match(*args, **kwargs) diff --git a/src/util/SocksProxy.py b/src/util/SocksProxy.py index f831137b..a11a385d 100644 --- a/src/util/SocksProxy.py +++ b/src/util/SocksProxy.py @@ -1,15 +1,11 @@ import socket -import socks -from Config import config +from lib.PySocks import socks + def create_connection(address, timeout=None, source_address=None): - if address in config.ip_local: - sock = socket.socket_noproxy(socket.AF_INET, socket.SOCK_STREAM) - sock.connect(address) - else: - sock = socks.socksocket() - sock.connect(address) + sock = socks.socksocket() + sock.connect(address) return sock @@ -18,9 +14,9 @@ def getaddrinfo(*args): return [(socket.AF_INET, socket.SOCK_STREAM, 6, '', (args[0], args[1]))] -def monkeyPatch(proxy_ip, proxy_port): +def monkeyPath(proxy_ip, proxy_port): + print proxy_ip, proxy_port socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, proxy_ip, int(proxy_port)) - socket.socket_noproxy = socket.socket socket.socket = socks.socksocket socket.create_connection = create_connection socket.getaddrinfo = getaddrinfo diff --git a/src/util/SslPatch.py b/src/util/SslPatch.py new file mode 100644 index 00000000..1daa7354 --- /dev/null +++ b/src/util/SslPatch.py @@ -0,0 +1,121 @@ +# https://journal.paul.querna.org/articles/2011/04/05/openssl-memory-use/ +# Disable SSL compression to save massive memory and cpu + 
+import logging +import os + +from Config import config + + +def openLibrary(): + import ctypes + import ctypes.util + try: + if sys.platform.startswith("win"): + dll_path = "src/lib/opensslVerify/libeay32.dll" + elif sys.platform == "cygwin": + dll_path = "/bin/cygcrypto-1.0.0.dll" + else: + dll_path = "/usr/local/ssl/lib/libcrypto.so" + ssl = ctypes.CDLL(dll_path, ctypes.RTLD_GLOBAL) + assert ssl + except: + dll_path = ctypes.util.find_library('ssl') or ctypes.util.find_library('crypto') or ctypes.util.find_library('libcrypto') + ssl = ctypes.CDLL(dll_path or 'libeay32', ctypes.RTLD_GLOBAL) + return ssl + + +def disableSSLCompression(): + import ctypes + import ctypes.util + try: + openssl = openLibrary() + openssl.SSL_COMP_get_compression_methods.restype = ctypes.c_void_p + except Exception, err: + logging.debug("Disable SSL compression failed: %s (normal on Windows)" % err) + return False + + openssl.sk_zero.argtypes = [ctypes.c_void_p] + openssl.sk_zero(openssl.SSL_COMP_get_compression_methods()) + logging.debug("Disabled SSL compression on %s" % openssl) + + +if config.disable_sslcompression: + try: + disableSSLCompression() + except Exception, err: + logging.debug("Error disabling SSL compression: %s" % err) + + +# https://github.com/gevent/gevent/issues/477 +# Re-add sslwrap to Python 2.7.9 + +__ssl__ = __import__('ssl') + +try: + _ssl = __ssl__._ssl +except AttributeError: + _ssl = __ssl__._ssl2 + +OldSSLSocket = __ssl__.SSLSocket + + +class NewSSLSocket(OldSSLSocket): + # Fix SSLSocket constructor + + def __init__( + self, sock, keyfile=None, certfile=None, server_side=False, + cert_reqs=__ssl__.CERT_REQUIRED, ssl_version=2, ca_certs=None, + do_handshake_on_connect=True, suppress_ragged_eofs=True, ciphers=None, + server_hostname=None, _context=None + ): + OldSSLSocket.__init__( + self, sock, keyfile=keyfile, certfile=certfile, + server_side=server_side, cert_reqs=cert_reqs, + ssl_version=ssl_version, ca_certs=ca_certs, + 
do_handshake_on_connect=do_handshake_on_connect, + suppress_ragged_eofs=suppress_ragged_eofs, ciphers=ciphers + ) + + +def new_sslwrap( + sock, server_side=False, keyfile=None, certfile=None, + cert_reqs=__ssl__.CERT_NONE, ssl_version=__ssl__.PROTOCOL_SSLv23, + ca_certs=None, ciphers=None +): + context = __ssl__.SSLContext(ssl_version) + context.verify_mode = cert_reqs or __ssl__.CERT_NONE + if ca_certs: + context.load_verify_locations(ca_certs) + if certfile: + context.load_cert_chain(certfile, keyfile) + if ciphers: + context.set_ciphers(ciphers) + + caller_self = inspect.currentframe().f_back.f_locals['self'] + return context._wrap_socket(sock, server_side=server_side, ssl_sock=caller_self) + + +# Re-add sslwrap to Python 2.7.9+ +if not hasattr(_ssl, 'sslwrap'): + import inspect + _ssl.sslwrap = new_sslwrap + __ssl__.SSLSocket = NewSSLSocket + logging.debug("Missing SSLwrap, readded.") + + +# Add SSLContext to gevent.ssl (Ubuntu 15 fix) +try: + import gevent + if not hasattr(gevent.ssl, "SSLContext"): + gevent.ssl.SSLContext = __ssl__.SSLContext + logging.debug("Missing SSLContext, readded.") +except Exception, err: + pass + +# Fix PROTOCOL_SSLv3 not defined +if "PROTOCOL_SSLv3" not in dir(__ssl__): + __ssl__.PROTOCOL_SSLv3 = __ssl__.PROTOCOL_SSLv23 + logging.debug("Redirected PROTOCOL_SSLv3 to PROTOCOL_SSLv23.") + +logging.debug("Python SSL version: %s" % __ssl__.OPENSSL_VERSION) diff --git a/src/util/StreamingMsgpack.py b/src/util/StreamingMsgpack.py new file mode 100644 index 00000000..5ec868c4 --- /dev/null +++ b/src/util/StreamingMsgpack.py @@ -0,0 +1,40 @@ +import os +import struct + +import msgpack + + +def msgpackHeader(size): + if size <= 2 ** 8 - 1: + return b"\xc4" + struct.pack("B", size) + elif size <= 2 ** 16 - 1: + return b"\xc5" + struct.pack(">H", size) + elif size <= 2 ** 32 - 1: + return b"\xc6" + struct.pack(">I", size) + else: + raise Exception("huge binary string") + + +def stream(data, writer): + packer = msgpack.Packer() + 
writer(packer.pack_map_header(len(data))) + for key, val in data.iteritems(): + writer(packer.pack(key)) + if issubclass(type(val), file): # File obj + max_size = os.fstat(val.fileno()).st_size - val.tell() + size = min(max_size, val.read_bytes) + bytes_left = size + writer(msgpackHeader(size)) + buff = 1024 * 64 + while 1: + writer(val.read(min(bytes_left, buff))) + bytes_left = bytes_left - buff + if bytes_left <= 0: + break + else: # Simple + writer(packer.pack(val)) + return size + + +class FilePart(file): + pass diff --git a/src/util/ThreadPool.py b/src/util/ThreadPool.py deleted file mode 100644 index 5b31ce37..00000000 --- a/src/util/ThreadPool.py +++ /dev/null @@ -1,180 +0,0 @@ -import threading -import time -import queue - -import gevent -import gevent.monkey -import gevent.threadpool -import gevent._threading - - -class ThreadPool: - def __init__(self, max_size, name=None): - self.setMaxSize(max_size) - if name: - self.name = name - else: - self.name = "ThreadPool#%s" % id(self) - - def setMaxSize(self, max_size): - self.max_size = max_size - if max_size > 0: - self.pool = gevent.threadpool.ThreadPool(max_size) - else: - self.pool = None - - def wrap(self, func): - if self.pool is None: - return func - - def wrapper(*args, **kwargs): - if not isMainThread(): # Call directly if not in main thread - return func(*args, **kwargs) - res = self.apply(func, args, kwargs) - return res - - return wrapper - - def spawn(self, *args, **kwargs): - if not isMainThread() and not self.pool._semaphore.ready(): - # Avoid semaphore error when spawning from other thread and the pool is full - return main_loop.call(self.spawn, *args, **kwargs) - res = self.pool.spawn(*args, **kwargs) - return res - - def apply(self, func, args=(), kwargs={}): - t = self.spawn(func, *args, **kwargs) - if self.pool._apply_immediately(): - return main_loop.call(t.get) - else: - return t.get() - - def kill(self): - if self.pool is not None and self.pool.size > 0 and main_loop: - 
main_loop.call(lambda: gevent.spawn(self.pool.kill).join(timeout=1)) - - del self.pool - self.pool = None - - def __enter__(self): - return self - - def __exit__(self, *args): - self.kill() - - -lock_pool = gevent.threadpool.ThreadPool(50) -main_thread_id = threading.current_thread().ident - - -def isMainThread(): - return threading.current_thread().ident == main_thread_id - - -class Lock: - def __init__(self): - self.lock = gevent._threading.Lock() - self.locked = self.lock.locked - self.release = self.lock.release - self.time_lock = 0 - - def acquire(self, *args, **kwargs): - self.time_lock = time.time() - if self.locked() and isMainThread(): - # Start in new thread to avoid blocking gevent loop - return lock_pool.apply(self.lock.acquire, args, kwargs) - else: - return self.lock.acquire(*args, **kwargs) - - def __del__(self): - while self.locked(): - self.release() - - -class Event: - def __init__(self): - self.get_lock = Lock() - self.res = None - self.get_lock.acquire(False) - self.done = False - - def set(self, res): - if self.done: - raise Exception("Event already has value") - self.res = res - self.get_lock.release() - self.done = True - - def get(self): - if not self.done: - self.get_lock.acquire(True) - if self.get_lock.locked(): - self.get_lock.release() - back = self.res - return back - - def __del__(self): - self.res = None - while self.get_lock.locked(): - self.get_lock.release() - - -# Execute function calls in main loop from other threads -class MainLoopCaller(): - def __init__(self): - self.queue_call = queue.Queue() - - self.pool = gevent.threadpool.ThreadPool(1) - self.num_direct = 0 - self.running = True - - def caller(self, func, args, kwargs, event_done): - try: - res = func(*args, **kwargs) - event_done.set((True, res)) - except Exception as err: - event_done.set((False, err)) - - def start(self): - gevent.spawn(self.run) - time.sleep(0.001) - - def run(self): - while self.running: - if self.queue_call.qsize() == 0: # Get queue in new thread 
to avoid gevent blocking - func, args, kwargs, event_done = self.pool.apply(self.queue_call.get) - else: - func, args, kwargs, event_done = self.queue_call.get() - gevent.spawn(self.caller, func, args, kwargs, event_done) - del func, args, kwargs, event_done - self.running = False - - def call(self, func, *args, **kwargs): - if threading.current_thread().ident == main_thread_id: - return func(*args, **kwargs) - else: - event_done = Event() - self.queue_call.put((func, args, kwargs, event_done)) - success, res = event_done.get() - del event_done - self.queue_call.task_done() - if success: - return res - else: - raise res - - -def patchSleep(): # Fix memory leak by using real sleep in threads - real_sleep = gevent.monkey.get_original("time", "sleep") - - def patched_sleep(seconds): - if isMainThread(): - gevent.sleep(seconds) - else: - real_sleep(seconds) - time.sleep = patched_sleep - - -main_loop = MainLoopCaller() -main_loop.start() -patchSleep() diff --git a/src/util/UpnpPunch.py b/src/util/UpnpPunch.py index 18f4aaee..eb4b3f16 100644 --- a/src/util/UpnpPunch.py +++ b/src/util/UpnpPunch.py @@ -1,37 +1,22 @@ import re -import urllib.request -import http.client +import urllib2 +import httplib import logging -from urllib.parse import urlparse +from urlparse import urlparse from xml.dom.minidom import parseString -from xml.parsers.expat import ExpatError -from gevent import socket import gevent +from gevent import socket -# Relevant UPnP spec: -# http://www.upnp.org/specs/gw/UPnP-gw-WANIPConnection-v1-Service.pdf +# Relevant UPnP spec: http://www.upnp.org/specs/gw/UPnP-gw-WANIPConnection-v1-Service.pdf # General TODOs: # Handle 0 or >1 IGDs -logger = logging.getLogger("Upnp") - -class UpnpError(Exception): - pass +remove_whitespace = re.compile(r'>\s*<') -class IGDError(UpnpError): - """ - Signifies a problem with the IGD. 
- """ - pass - - -REMOVE_WHITESPACE = re.compile(r'>\s*<') - - -def perform_m_search(local_ip): +def _m_search_ssdp(local_ip): """ Broadcast a UDP SSDP M-SEARCH packet and return response. """ @@ -44,24 +29,21 @@ def perform_m_search(local_ip): 'MX: 2\r\n', 'ST: {0}\r\n'.format(search_target), '\r\n'] - ).encode("utf8") + ) sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - sock.bind((local_ip, 0)) + sock.bind((local_ip, 10000)) sock.sendto(ssdp_request, ('239.255.255.250', 1900)) - if local_ip == "127.0.0.1": - sock.settimeout(1) - else: - sock.settimeout(5) + sock.settimeout(5) try: - return sock.recv(2048).decode("utf8") - except socket.error: - raise UpnpError("No reply from IGD using {} as IP".format(local_ip)) - finally: - sock.close() + return sock.recv(2048) + except socket.error, err: + # no reply from IGD, possibly no IGD on LAN + logging.debug("UDP SSDP M-SEARCH send error using ip %s: %s" % (local_ip, err)) + return False def _retrieve_location_from_ssdp(response): @@ -69,28 +51,24 @@ def _retrieve_location_from_ssdp(response): Parse raw HTTP response to retrieve the UPnP location header and return a ParseResult object. """ - parsed_headers = re.findall(r'(?P.*?): (?P.*?)\r\n', response) - header_locations = [header[1] - for header in parsed_headers - if header[0].lower() == 'location'] + parsed = re.findall(r'(?P.*?): (?P.*?)\r\n', response) + location_header = filter(lambda x: x[0].lower() == 'location', parsed) - if len(header_locations) < 1: - raise IGDError('IGD response does not contain a "location" header.') + if not len(location_header): + # no location header returned :( + return False - return urlparse(header_locations[0]) + return urlparse(location_header[0][1]) def _retrieve_igd_profile(url): """ Retrieve the device's UPnP profile. 
""" - try: - return urllib.request.urlopen(url.geturl(), timeout=5).read().decode('utf-8') - except socket.error: - raise IGDError('IGD profile query timed out') + return urllib2.urlopen(url.geturl()).read() -def _get_first_child_data(node): +def _node_val(node): """ Get the text value of the first child text node of a node. """ @@ -101,89 +79,34 @@ def _parse_igd_profile(profile_xml): """ Traverse the profile xml DOM looking for either WANIPConnection or WANPPPConnection and return - the 'controlURL' and the service xml schema. + the value found as well as the 'controlURL'. """ - try: - dom = parseString(profile_xml) - except ExpatError as e: - raise IGDError( - 'Unable to parse IGD reply: {0} \n\n\n {1}'.format(profile_xml, e)) + dom = parseString(profile_xml) service_types = dom.getElementsByTagName('serviceType') for service in service_types: - if _get_first_child_data(service).find('WANIPConnection') > 0 or \ - _get_first_child_data(service).find('WANPPPConnection') > 0: - try: - control_url = _get_first_child_data( - service.parentNode.getElementsByTagName('controlURL')[0]) - upnp_schema = _get_first_child_data(service).split(':')[-2] - return control_url, upnp_schema - except IndexError: - # Pass the error because any error here should raise the - # that's specified outside the for loop. 
- pass - raise IGDError( - 'Could not find a control url or UPNP schema in IGD response.') + if _node_val(service).find('WANIPConnection') > 0 or \ + _node_val(service).find('WANPPPConnection') > 0: + control_url = service.parentNode.getElementsByTagName( + 'controlURL' + )[0].childNodes[0].data + upnp_schema = _node_val(service).split(':')[-2] + return control_url, upnp_schema + + return False -# add description -def _get_local_ips(): - def method1(): - try: - # get local ip using UDP and a broadcast address - s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) - # Not using because gevents getaddrinfo doesn't like that - # using port 1 as per hobbldygoop's comment about port 0 not working on osx: - # https://github.com/sirMackk/ZeroNet/commit/fdcd15cf8df0008a2070647d4d28ffedb503fba2#commitcomment-9863928 - s.connect(('239.255.255.250', 1)) - return [s.getsockname()[0]] - except: - pass - - def method2(): - # Get ip by using UDP and a normal address (google dns ip) - try: - s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - s.connect(('8.8.8.8', 0)) - return [s.getsockname()[0]] - except: - pass - - def method3(): - # Get ip by '' hostname . Not supported on all platforms. 
- try: - return socket.gethostbyname_ex('')[2] - except: - pass - - threads = [ - gevent.spawn(method1), - gevent.spawn(method2), - gevent.spawn(method3) - ] - - gevent.joinall(threads, timeout=5) - - local_ips = [] - for thread in threads: - if thread.value: - local_ips += thread.value - - # Delete duplicates - local_ips = list(set(local_ips)) +def _get_local_ip(): + s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) + # not using because gevents getaddrinfo doesn't like that + # using port 1 as per hobbldygoop's comment about port 0 not working on osx: + # https://github.com/sirMackk/ZeroNet/commit/fdcd15cf8df0008a2070647d4d28ffedb503fba2#commitcomment-9863928 + s.connect(('239.255.255.250', 1)) + return s.getsockname()[0] - # Probably we looking for an ip starting with 192 - local_ips = sorted(local_ips, key=lambda a: a.startswith("192"), reverse=True) - - return local_ips - - -def _create_open_message(local_ip, - port, - description="UPnPPunch", - protocol="TCP", +def _create_soap_message(local_ip, port, description="UPnPPunch", protocol="TCP", upnp_schema='WANIPConnection'): """ Build a SOAP AddPortMapping message. 
@@ -208,67 +131,39 @@ def _create_open_message(local_ip, host_ip=local_ip, description=description, upnp_schema=upnp_schema) - return (REMOVE_WHITESPACE.sub('><', soap_message), 'AddPortMapping') - - -def _create_close_message(local_ip, - port, - description=None, - protocol='TCP', - upnp_schema='WANIPConnection'): - soap_message = """ - - - - - {port} - {protocol} - - -""".format(port=port, - protocol=protocol, - upnp_schema=upnp_schema) - return (REMOVE_WHITESPACE.sub('><', soap_message), 'DeletePortMapping') + return remove_whitespace.sub('><', soap_message) def _parse_for_errors(soap_response): - logger.debug(soap_response.status) - if soap_response.status >= 400: - response_data = soap_response.read() - logger.debug(response_data) - try: - err_dom = parseString(response_data) - err_code = _get_first_child_data(err_dom.getElementsByTagName( - 'errorCode')[0]) - err_msg = _get_first_child_data( - err_dom.getElementsByTagName('errorDescription')[0] - ) - except Exception as err: - raise IGDError( - 'Unable to parse SOAP error: {0}. Got: "{1}"'.format( - err, response_data)) - raise IGDError( + if soap_response.status == 500: + err_dom = parseString(soap_response.read()) + err_code = _node_val(err_dom.getElementsByTagName('errorCode')[0]) + err_msg = _node_val( + err_dom.getElementsByTagName('errorDescription')[0] + ) + logging.error('SOAP request error: {0} - {1}'.format(err_code, err_msg)) + raise Exception( 'SOAP request error: {0} - {1}'.format(err_code, err_msg) ) - return soap_response + + return False + else: + return True -def _send_soap_request(location, upnp_schema, control_path, soap_fn, - soap_message): +def _send_soap_request(location, upnp_schema, control_url, soap_message): """ Send out SOAP request to UPnP device and return a response. 
""" headers = { 'SOAPAction': ( '"urn:schemas-upnp-org:service:{schema}:' - '1#{fn_name}"'.format(schema=upnp_schema, fn_name=soap_fn) + '1#AddPortMapping"'.format(schema=upnp_schema) ), 'Content-Type': 'text/xml' } - logger.debug("Sending UPnP request to {0}:{1}...".format( - location.hostname, location.port)) - conn = http.client.HTTPConnection(location.hostname, location.port) - conn.request('POST', control_path, soap_message, headers) + conn = httplib.HTTPConnection(location.hostname, location.port) + conn.request('POST', control_url, soap_message, headers) response = conn.getresponse() conn.close() @@ -276,120 +171,68 @@ def _send_soap_request(location, upnp_schema, control_path, soap_fn, return _parse_for_errors(response) -def _collect_idg_data(ip_addr): - idg_data = {} - idg_response = perform_m_search(ip_addr) - idg_data['location'] = _retrieve_location_from_ssdp(idg_response) - idg_data['control_path'], idg_data['upnp_schema'] = _parse_igd_profile( - _retrieve_igd_profile(idg_data['location'])) - return idg_data - - -def _send_requests(messages, location, upnp_schema, control_path): - responses = [_send_soap_request(location, upnp_schema, control_path, - message_tup[1], message_tup[0]) - for message_tup in messages] - - if all(rsp.status == 200 for rsp in responses): - return - raise UpnpError('Sending requests using UPnP failed.') - - -def _orchestrate_soap_request(ip, port, msg_fn, desc=None, protos=("TCP", "UDP")): - logger.debug("Trying using local ip: %s" % ip) - idg_data = _collect_idg_data(ip) - - soap_messages = [ - msg_fn(ip, port, desc, proto, idg_data['upnp_schema']) - for proto in protos - ] - - _send_requests(soap_messages, **idg_data) - - -def _communicate_with_igd(port=15441, - desc="UpnpPunch", - retries=3, - fn=_create_open_message, - protos=("TCP", "UDP")): +def open_port(port=15441, desc="UpnpPunch"): """ - Manage sending a message generated by 'fn'. + Attempt to forward a port using UPnP. 
""" - local_ips = _get_local_ips() - success = False + local_ips = [_get_local_ip()] + try: + local_ips += socket.gethostbyname_ex('')[2] # Get ip by '' hostname not supported on all platform + except: + pass - def job(local_ip): - for retry in range(retries): - try: - _orchestrate_soap_request(local_ip, port, fn, desc, protos) - return True - except Exception as e: - logger.debug('Upnp request using "{0}" failed: {1}'.format(local_ip, e)) - gevent.sleep(1) - return False + try: + s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + s.connect(('8.8.8.8', 0)) # Using google dns route + local_ips.append(s.getsockname()[0]) + except: + pass - threads = [] + local_ips = list(set(local_ips)) # Delete duplicates + logging.debug("Found local ips: %s" % local_ips) + local_ips = local_ips * 3 # Retry every ip 3 times for local_ip in local_ips: - job_thread = gevent.spawn(job, local_ip) - threads.append(job_thread) - gevent.sleep(0.1) - if any([thread.value for thread in threads]): - success = True - break + logging.debug("Trying using local ip: %s" % local_ip) + idg_response = _m_search_ssdp(local_ip) - # Wait another 10sec for competition or any positive result - for _ in range(10): - all_done = all([thread.value is not None for thread in threads]) - any_succeed = any([thread.value for thread in threads]) - if all_done or any_succeed: - break - gevent.sleep(1) + if not idg_response: + logging.debug("No IGD response") + continue - if any([thread.value for thread in threads]): - success = True + location = _retrieve_location_from_ssdp(idg_response) - if not success: - raise UpnpError( - 'Failed to communicate with igd using port {0} on local machine after {1} tries.'.format( - port, retries)) + if not location: + logging.debug("No location") + continue - return success + parsed = _parse_igd_profile( + _retrieve_igd_profile(location) + ) + if not parsed: + logging.debug("IGD parse error using location %s" % repr(location)) + continue -def ask_to_open_port(port=15441, 
desc="UpnpPunch", retries=3, protos=("TCP", "UDP")): - logger.debug("Trying to open port %d." % port) - return _communicate_with_igd(port=port, - desc=desc, - retries=retries, - fn=_create_open_message, - protos=protos) + control_url, upnp_schema = parsed + soap_messages = [_create_soap_message(local_ip, port, desc, proto, upnp_schema) + for proto in ['TCP', 'UDP']] -def ask_to_close_port(port=15441, desc="UpnpPunch", retries=3, protos=("TCP", "UDP")): - logger.debug("Trying to close port %d." % port) - # retries=1 because multiple successes cause 500 response and failure - return _communicate_with_igd(port=port, - desc=desc, - retries=retries, - fn=_create_close_message, - protos=protos) + requests = [gevent.spawn( + _send_soap_request, location, upnp_schema, control_url, message + ) for message in soap_messages] + gevent.joinall(requests, timeout=3) + + if all([request.value for request in requests]): + return True + return False if __name__ == "__main__": from gevent import monkey - monkey.patch_all() - logging.basicConfig(level=logging.DEBUG) - import time - - s = time.time() - print("Opening port...") - print("Success:", ask_to_open_port(15443, "ZeroNet", protos=["TCP"])) - print("Done in", time.time() - s) - - - print("Closing port...") - print("Success:", ask_to_close_port(15443, "ZeroNet", protos=["TCP"])) - print("Done in", time.time() - s) + monkey.patch_socket() + logging.getLogger().setLevel(logging.DEBUG) + print open_port(15441, "ZeroNet") diff --git a/src/util/__init__.py b/src/util/__init__.py index ab8a8b88..c226368e 100644 --- a/src/util/__init__.py +++ b/src/util/__init__.py @@ -1,4 +1,2 @@ -from .Cached import Cached -from .Event import Event -from .Noparallel import Noparallel -from .Pooled import Pooled +from Event import Event +from Noparallel import Noparallel diff --git a/src/util/helper.py b/src/util/helper.py index 61455b08..9750af53 100644 --- a/src/util/helper.py +++ b/src/util/helper.py @@ -1,110 +1,21 @@ import os -import stat import 
socket import struct import re import collections import time -import logging -import base64 -import json - -import gevent - -from Config import config -def atomicWrite(dest, content, mode="wb"): - try: - with open(dest + "-tmpnew", mode) as f: - f.write(content) - f.flush() - os.fsync(f.fileno()) - if os.path.isfile(dest + "-tmpold"): # Previous incomplete write - os.rename(dest + "-tmpold", dest + "-tmpold-%s" % time.time()) - if os.path.isfile(dest): # Rename old file to -tmpold - os.rename(dest, dest + "-tmpold") - os.rename(dest + "-tmpnew", dest) - if os.path.isfile(dest + "-tmpold"): - os.unlink(dest + "-tmpold") # Remove old file - return True - except Exception as err: - from Debug import Debug - logging.error( - "File %s write failed: %s, (%s) reverting..." % - (dest, Debug.formatException(err), Debug.formatStack()) - ) - if os.path.isfile(dest + "-tmpold") and not os.path.isfile(dest): - os.rename(dest + "-tmpold", dest) - return False - - -def jsonDumps(data): - content = json.dumps(data, indent=1, sort_keys=True) - - # Make it a little more compact by removing unnecessary white space - def compact_dict(match): - if "\n" in match.group(0): - return match.group(0).replace(match.group(1), match.group(1).strip()) - else: - return match.group(0) - - content = re.sub(r"\{(\n[^,\[\{]{10,100000}?)\}[, ]{0,2}\n", compact_dict, content, flags=re.DOTALL) - - def compact_list(match): - if "\n" in match.group(0): - stripped_lines = re.sub("\n[ ]*", "", match.group(1)) - return match.group(0).replace(match.group(1), stripped_lines) - else: - return match.group(0) - - content = re.sub(r"\[([^\[\{]{2,100000}?)\][, ]{0,2}\n", compact_list, content, flags=re.DOTALL) - - # Remove end of line whitespace - content = re.sub(r"(?m)[ ]+$", "", content) - return content - - -def openLocked(path, mode="wb"): - try: - if os.name == "posix": - import fcntl - f = open(path, mode) - fcntl.flock(f, fcntl.LOCK_EX | fcntl.LOCK_NB) - elif os.name == "nt": - import msvcrt - f = 
open(path, mode) - msvcrt.locking(f.fileno(), msvcrt.LK_NBLCK, 1) - else: - f = open(path, mode) - except (IOError, PermissionError, BlockingIOError) as err: - raise BlockingIOError("Unable to lock file: %s" % err) - return f - - -def getFreeSpace(): - free_space = -1 - if "statvfs" in dir(os): # Unix - statvfs = os.statvfs(config.data_dir.encode("utf8")) - free_space = statvfs.f_frsize * statvfs.f_bavail - else: # Windows - try: - import ctypes - free_space_pointer = ctypes.c_ulonglong(0) - ctypes.windll.kernel32.GetDiskFreeSpaceExW( - ctypes.c_wchar_p(config.data_dir), None, None, ctypes.pointer(free_space_pointer) - ) - free_space = free_space_pointer.value - except Exception as err: - logging.error("GetFreeSpace error: %s" % err) - return free_space - - -def sqlquote(value): - if type(value) is int: - return str(value) - else: - return "'%s'" % value.replace("'", "''") +def atomicWrite(dest, content, mode="w"): + with open(dest + "-new", mode) as f: + f.write(content) + f.flush() + os.fsync(f.fileno()) + if os.path.isfile(dest + "-old"): # Previous incomplete write + os.rename(dest + "-old", dest + "-old-%s" % time.time()) + os.rename(dest, dest + "-old") + os.rename(dest + "-new", dest) + os.unlink(dest + "-old") def shellquote(*args): @@ -114,71 +25,29 @@ def shellquote(*args): return tuple(['"%s"' % arg.replace('"', "") for arg in args]) -def packPeers(peers): - packed_peers = {"ipv4": [], "ipv6": [], "onion": []} - for peer in peers: - try: - ip_type = getIpType(peer.ip) - if ip_type in packed_peers: - packed_peers[ip_type].append(peer.packMyAddress()) - except Exception: - logging.debug("Error packing peer address: %s" % peer) - return packed_peers - - -# ip, port to packed 6byte or 18byte format +# ip, port to packed 6byte format def packAddress(ip, port): - if ":" in ip: - return socket.inet_pton(socket.AF_INET6, ip) + struct.pack("H", port) - else: - return socket.inet_aton(ip) + struct.pack("H", port) + return socket.inet_aton(ip) + struct.pack("H", 
port) -# From 6byte or 18byte format to ip, port +# From 6byte format to ip, port def unpackAddress(packed): - if len(packed) == 18: - return socket.inet_ntop(socket.AF_INET6, packed[0:16]), struct.unpack_from("H", packed, 16)[0] - else: - if len(packed) != 6: - raise Exception("Invalid length ip4 packed address: %s" % len(packed)) - return socket.inet_ntoa(packed[0:4]), struct.unpack_from("H", packed, 4)[0] - - -# onion, port to packed 12byte format -def packOnionAddress(onion, port): - onion = onion.replace(".onion", "") - return base64.b32decode(onion.upper()) + struct.pack("H", port) - - -# From 12byte format to ip, port -def unpackOnionAddress(packed): - return base64.b32encode(packed[0:-2]).lower().decode() + ".onion", struct.unpack("H", packed[-2:])[0] + return socket.inet_ntoa(packed[0:4]), struct.unpack_from("H", packed, 4)[0] # Get dir from file -# Return: data/site/content.json -> data/site/ +# Return: data/site/content.json -> data/site def getDirname(path): - if "/" in path: - return path[:path.rfind("/") + 1].lstrip("/") - else: - return "" + file_dir = re.sub("[^/]*?$", "", path).rstrip("/") + if file_dir: + file_dir += "/" # Add / at end if its not the root + return file_dir # Get dir from file # Return: data/site/content.json -> content.json def getFilename(path): - return path[path.rfind("/") + 1:] - - -def getFilesize(path): - try: - s = os.stat(path) - except Exception: - return None - if stat.S_ISREG(s.st_mode): # Test if it's file - return s.st_size - else: - return None + return re.sub("^.*/", "", path) # Convert hash to hashid for hashfield @@ -190,167 +59,6 @@ def toHashId(hash): def mergeDicts(dicts): back = collections.defaultdict(set) for d in dicts: - for key, val in d.items(): + for key, val in d.iteritems(): back[key].update(val) return dict(back) - - -# Request https url using gevent SSL error workaround -def httpRequest(url, as_file=False): - if url.startswith("http://"): - import urllib.request - response = 
urllib.request.urlopen(url) - else: # Hack to avoid Python gevent ssl errors - import socket - import http.client - import ssl - - host, request = re.match("https://(.*?)(/.*?)$", url).groups() - - conn = http.client.HTTPSConnection(host) - sock = socket.create_connection((conn.host, conn.port), conn.timeout, conn.source_address) - conn.sock = ssl.wrap_socket(sock, conn.key_file, conn.cert_file) - conn.request("GET", request) - response = conn.getresponse() - if response.status in [301, 302, 303, 307, 308]: - logging.info("Redirect to: %s" % response.getheader('Location')) - response = httpRequest(response.getheader('Location')) - - if as_file: - import io - data = io.BytesIO() - while True: - buff = response.read(1024 * 16) - if not buff: - break - data.write(buff) - return data - else: - return response - - -def timerCaller(secs, func, *args, **kwargs): - gevent.spawn_later(secs, timerCaller, secs, func, *args, **kwargs) - func(*args, **kwargs) - - -def timer(secs, func, *args, **kwargs): - return gevent.spawn_later(secs, timerCaller, secs, func, *args, **kwargs) - - -def create_connection(address, timeout=None, source_address=None): - if address in config.ip_local: - sock = socket.create_connection_original(address, timeout, source_address) - else: - sock = socket.create_connection_original(address, timeout, socket.bind_addr) - return sock - - -def socketBindMonkeyPatch(bind_ip, bind_port): - import socket - logging.info("Monkey patching socket to bind to: %s:%s" % (bind_ip, bind_port)) - socket.bind_addr = (bind_ip, int(bind_port)) - socket.create_connection_original = socket.create_connection - socket.create_connection = create_connection - - -def limitedGzipFile(*args, **kwargs): - import gzip - - class LimitedGzipFile(gzip.GzipFile): - def read(self, size=-1): - return super(LimitedGzipFile, self).read(1024 * 1024 * 25) - return LimitedGzipFile(*args, **kwargs) - - -def avg(items): - if len(items) > 0: - return sum(items) / len(items) - else: - return 0 - - 
-def isIp(ip): - if ":" in ip: # IPv6 - try: - socket.inet_pton(socket.AF_INET6, ip) - return True - except Exception: - return False - - else: # IPv4 - try: - socket.inet_aton(ip) - return True - except Exception: - return False - - -local_ip_pattern = re.compile(r"^127\.|192\.168\.|10\.|172\.1[6-9]\.|172\.2[0-9]\.|172\.3[0-1]\.|169\.254\.|::1$|fe80") -def isPrivateIp(ip): - return local_ip_pattern.match(ip) - - -def getIpType(ip): - if ip.endswith(".onion"): - return "onion" - elif ":" in ip: - return "ipv6" - elif re.match(r"[0-9\.]+$", ip): - return "ipv4" - else: - return "unknown" - - -def createSocket(ip, sock_type=socket.SOCK_STREAM): - ip_type = getIpType(ip) - if ip_type == "ipv6": - return socket.socket(socket.AF_INET6, sock_type) - else: - return socket.socket(socket.AF_INET, sock_type) - - -def getInterfaceIps(ip_type="ipv4"): - res = [] - if ip_type == "ipv6": - test_ips = ["ff0e::c", "2606:4700:4700::1111"] - else: - test_ips = ['239.255.255.250', "8.8.8.8"] - - for test_ip in test_ips: - try: - s = createSocket(test_ip, sock_type=socket.SOCK_DGRAM) - s.connect((test_ip, 1)) - res.append(s.getsockname()[0]) - except Exception: - pass - - try: - res += [ip[4][0] for ip in socket.getaddrinfo(socket.gethostname(), 1)] - except Exception: - pass - - res = [re.sub("%.*", "", ip) for ip in res if getIpType(ip) == ip_type and isIp(ip)] - return list(set(res)) - - -def cmp(a, b): - return (a > b) - (a < b) - - -def encodeResponse(func): # Encode returned data from utf8 to bytes - def wrapper(*args, **kwargs): - back = func(*args, **kwargs) - if "__next__" in dir(back): - for part in back: - if type(part) == bytes: - yield part - else: - yield part.encode() - else: - if type(back) == bytes: - yield back - else: - yield back.encode() - - return wrapper diff --git a/start.py b/start.py index 063d7802..5a612a8d 100644 --- a/start.py +++ b/start.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python3 +#!/usr/bin/env python # Included modules @@ -9,9 +9,8 @@ import zeronet def 
main(): - if "--open_browser" not in sys.argv: - sys.argv = [sys.argv[0]] + ["--open_browser", "default_browser"] + sys.argv[1:] - zeronet.start() + sys.argv += ["--open_browser", "default_browser"] + zeronet.main() if __name__ == '__main__': main() diff --git a/tools/coffee/coffee-script.js b/tools/coffee/coffee-script.js index 7fce39a6..06671c21 100644 --- a/tools/coffee/coffee-script.js +++ b/tools/coffee/coffee-script.js @@ -1,405 +1,12 @@ /** - * CoffeeScript Compiler v1.12.6 + * CoffeeScript Compiler v1.10.0 * http://coffeescript.org * * Copyright 2011, Jeremy Ashkenas * Released under the MIT License */ -var $jscomp=$jscomp||{};$jscomp.scope={};$jscomp.checkStringArgs=function(u,xa,va){if(null==u)throw new TypeError("The 'this' value for String.prototype."+va+" must not be null or undefined");if(xa instanceof RegExp)throw new TypeError("First argument to String.prototype."+va+" must not be a regular expression");return u+""}; -$jscomp.defineProperty="function"==typeof Object.defineProperties?Object.defineProperty:function(u,xa,va){if(va.get||va.set)throw new TypeError("ES3 does not support getters and setters.");u!=Array.prototype&&u!=Object.prototype&&(u[xa]=va.value)};$jscomp.getGlobal=function(u){return"undefined"!=typeof window&&window===u?u:"undefined"!=typeof global&&null!=global?global:u};$jscomp.global=$jscomp.getGlobal(this); -$jscomp.polyfill=function(u,xa,va,f){if(xa){va=$jscomp.global;u=u.split(".");for(f=0;fu||1342177279>>=1)va+=va;return f}},"es6-impl","es3");$jscomp.findInternal=function(u,xa,va){u instanceof String&&(u=String(u));for(var f=u.length,qa=0;qa>>=1,a+=a;return g};f.compact=function(a){var g,b;var n=[];var y=0;for(b=a.length;yc)return m.call(this,L,a-1);(w=L[0],0<=y.call(g,w))?c+=1:(l=L[0],0<=y.call(h,l))&&--c;a+=1}return a-1};l.prototype.removeLeadingNewlines=function(){var a,b;var m=this.tokens;var k=a=0;for(b=m.length;ag;f=0<=g?++b:--b){for(;"HERECOMMENT"===this.tag(l+f+c);)c+=2;if(null!=h[f]&&("string"===typeof 
h[f]&&(h[f]=[h[f]]),k=this.tag(l+f+c),0>y.call(h[f],k)))return-1}return l+f+c-1};l.prototype.looksObjectish=function(a){if(-1y.call(b,w))&&((f=this.tag(a),0>y.call(g,f))||this.tokens[a].generated)&&(n=this.tag(a),0>y.call(R,n)));)(k=this.tag(a),0<=y.call(h,k))&&c.push(this.tag(a)),(l=this.tag(a),0<=y.call(g, -l))&&c.length&&c.pop(),--a;return x=this.tag(a),0<=y.call(b,x)};l.prototype.addImplicitBracesAndParens=function(){var a=[];var l=null;return this.scanTokens(function(c,k,f){var m,w,n,r;var G=c[0];var K=(m=0y.call(h,a):return l[1];case "@"!==this.tag(k-2):return k-2;default:return k-1}}.call(this);"HERECOMMENT"===this.tag(q-2);)q-=2;this.insideForDeclaration="FOR"===u;m=0===q||(r=this.tag(q-1),0<=y.call(R,r))||f[q-1].newLine;if(B()&&(T=B(),r=T[0],v=T[1],("{"===r||"INDENT"===r&&"{"===this.tag(v-1))&&(m||","===this.tag(q-1)||"{"===this.tag(q-1))))return A(1);M(q,!!m);return A(2)}if(0<=y.call(R,G))for(M=a.length-1;0<=M;M+=-1)r=a[M],E(r)&&(r[2].sameLine= -!1);M="OUTDENT"===K||m.newLine;if(0<=y.call(x,G)||0<=y.call(z,G)&&M)for(;O();)if(M=B(),r=M[0],v=M[1],m=M[2],M=m.sameLine,m=m.startsLine,C()&&","!==K)S();else if(T()&&!this.insideForDeclaration&&M&&"TERMINATOR"!==G&&":"!==K)q();else if(!T()||"TERMINATOR"!==G||","===K||m&&this.looksObjectish(k+1))break;else{if("HERECOMMENT"===u)return A(1);q()}if(!(","!==G||this.looksObjectish(k+1)||!T()||this.insideForDeclaration||"TERMINATOR"===u&&this.looksObjectish(k+2)))for(u="OUTDENT"===u?1:0;T();)q(k+u);return A(1)})}; -l.prototype.addLocationDataToGeneratedTokens=function(){return this.scanTokens(function(a,b,g){var c,l;if(a[2]||!a.generated&&!a.explicit)return 1;if("{"===a[0]&&(c=null!=(l=g[b+1])?l[2]:void 0)){var m=c.first_line;c=c.first_column}else(c=null!=(m=g[b-1])?m[2]:void 0)?(m=c.last_line,c=c.last_column):m=c=0;a[2]={first_line:m,first_column:c,last_line:m,last_column:c};return 1})};l.prototype.fixOutdentLocationData=function(){return this.scanTokens(function(a,b,g){if(!("OUTDENT"===a[0]||a.generated&& 
-"CALL_END"===a[0]||a.generated&&"}"===a[0]))return 1;b=g[b-1][2];a[2]={first_line:b.last_line,first_column:b.last_column,last_line:b.last_line,last_column:b.last_column};return 1})};l.prototype.normalizeLines=function(){var b,g;var l=b=g=null;var k=function(a,b){var c,g,k,f;return";"!==a[1]&&(c=a[0],0<=y.call(O,c))&&!("TERMINATOR"===a[0]&&(g=this.tag(b+1),0<=y.call(H,g)))&&!("ELSE"===a[0]&&"THEN"!==l)&&!!("CATCH"!==(k=a[0])&&"FINALLY"!==k||"-\x3e"!==l&&"\x3d\x3e"!==l)||(f=a[0],0<=y.call(z,f))&&(this.tokens[b- -1].newLine||"OUTDENT"===this.tokens[b-1][0])};var f=function(a,b){return this.tokens.splice(","===this.tag(b-1)?b-1:b,0,g)};return this.scanTokens(function(c,m,h){var w,n,r;c=c[0];if("TERMINATOR"===c){if("ELSE"===this.tag(m+1)&&"OUTDENT"!==this.tag(m-1))return h.splice.apply(h,[m,1].concat(a.call(this.indentation()))),1;if(w=this.tag(m+1),0<=y.call(H,w))return h.splice(m,1),0}if("CATCH"===c)for(w=n=1;2>=n;w=++n)if("OUTDENT"===(r=this.tag(m+w))||"TERMINATOR"===r||"FINALLY"===r)return h.splice.apply(h, -[m+w,0].concat(a.call(this.indentation()))),2+w;0<=y.call(J,c)&&"INDENT"!==this.tag(m+1)&&("ELSE"!==c||"IF"!==this.tag(m+1))&&(l=c,r=this.indentation(h[m]),b=r[0],g=r[1],"THEN"===l&&(b.fromThen=!0),h.splice(m+1,0,b),this.detectEnd(m+2,k,f),"THEN"===c&&h.splice(m,1));return 1})};l.prototype.tagPostfixConditionals=function(){var a=null;var b=function(a,b){a=a[0];b=this.tokens[b-1][0];return"TERMINATOR"===a||"INDENT"===a&&0>y.call(J,b)};var g=function(b,c){if("INDENT"!==b[0]||b.generated&&!b.fromThen)return a[0]= -"POST_"+a[0]};return this.scanTokens(function(c,l){if("IF"!==c[0])return 1;a=c;this.detectEnd(l+1,b,g);return 1})};l.prototype.indentation=function(a){var b=["INDENT",2];var c=["OUTDENT",2];a?(b.generated=c.generated=!0,b.origin=c.origin=a):b.explicit=c.explicit=!0;return[b,c]};l.prototype.generate=b;l.prototype.tag=function(a){var b;return null!=(b=this.tokens[a])?b[0]:void 0};return l}();var 
ya=[["(",")"],["[","]"],["{","}"],["INDENT","OUTDENT"],["CALL_START","CALL_END"],["PARAM_START","PARAM_END"], -["INDEX_START","INDEX_END"],["STRING_START","STRING_END"],["REGEX_START","REGEX_END"]];f.INVERSES=u={};var g=[];var h=[];var r=0;for(q=ya.length;rthis.indent){if(c||"RETURN"===this.tag())return this.indebt=b-this.indent,this.suppressNewlines(),a.length;if(!this.tokens.length)return this.baseIndent= -this.indent=b,a.length;c=b-this.indent+this.outdebt;this.token("INDENT",c,a.length-b,b);this.indents.push(c);this.ends.push({tag:"OUTDENT"});this.outdebt=this.indebt=0;this.indent=b}else bl&&(m=this.token("+","+"),m[2]={first_line:w[2].first_line,first_column:w[2].first_column,last_line:w[2].first_line,last_column:w[2].first_column});(f=this.tokens).push.apply(f,r)}if(k)return a=a[a.length-1],k.origin=["STRING",null,{first_line:k[2].first_line,first_column:k[2].first_column,last_line:a[2].last_line,last_column:a[2].last_column}],k=this.token("STRING_END",")"),k[2]={first_line:a[2].last_line,first_column:a[2].last_column, -last_line:a[2].last_line,last_column:a[2].last_column}};a.prototype.pair=function(a){var b=this.ends;b=b[b.length-1];return a!==(b=null!=b?b.tag:void 0)?("OUTDENT"!==b&&this.error("unmatched "+a),b=this.indents,b=b[b.length-1],this.outdentToken(b,!0),this.pair(a)):this.ends.pop()};a.prototype.getLineAndColumnFromChunk=function(a){if(0===a)return[this.chunkLine,this.chunkColumn];var b=a>=this.chunk.length?this.chunk:this.chunk.slice(0,+(a-1)+1||9E9);a=g(b,"\n");var c=this.chunkColumn;0a)return b(a);var c=Math.floor((a-65536)/1024)+55296;a=(a-65536)%1024+56320;return""+b(c)+b(a)};a.prototype.replaceUnicodeCodePointEscapes= -function(a,b){return a.replace(sa,function(a){return function(c,g,k,h){if(g)return g;c=parseInt(k,16);1114111q.call(y.call(I).concat(y.call(F)),a):return"keyword '"+b+"' can't be assigned";case 0>q.call(O, -a):return"'"+b+"' can't be assigned";case 0>q.call(J,a):return"reserved word '"+b+"' can't be 
assigned";default:return!1}};f.isUnassignable=B;var H=function(a){var b;return"IDENTIFIER"===a[0]?("from"===a[1]&&(a[1][0]="IDENTIFIER",!0),!0):"FOR"===a[0]?!1:"{"===(b=a[1])||"["===b||","===b||":"===b?!1:!0};var I="true false null this new delete typeof in instanceof return throw break continue debugger yield if else switch for while do try catch finally class extends super import export default".split(" "); -var F="undefined Infinity NaN then unless until loop of by when".split(" ");var Q={and:"\x26\x26",or:"||",is:"\x3d\x3d",isnt:"!\x3d",not:"!",yes:"true",no:"false",on:"true",off:"false"};var x=function(){var a=[];for(qa in Q)a.push(qa);return a}();F=F.concat(x);var J="case function var void with const let enum native implements interface package private protected public static".split(" ");var O=["arguments","eval"];f.JS_FORBIDDEN=I.concat(J).concat(O);var R=65279;var z=/^(?!\d)((?:(?!\s)[$\w\x7f-\uffff])+)([^\n\S]*:(?!:))?/; -var l=/^0b[01]+|^0o[0-7]+|^0x[\da-f]+|^\d*\.?\d+(?:e[+-]?\d+)?/i;var c=/^(?:[-=]>|[-+*\/%<>&|^!?=]=|>>>=?|([-+:])\1|([&|<>*\/%])\2=?|\?(\.|::)|\.{2,3})/;var w=/^[^\n\S]+/;var m=/^###([^#][\s\S]*?)(?:###[^\n\S]*|###$)|^(?:\s*#(?!##[^#]).*)+/;var k=/^[-=]>/;var K=/^(?:\n[^\n\S]*)+/;var P=/^`(?!``)((?:[^`\\]|\\[\s\S])*)`/;var L=/^```((?:[^`\\]|\\[\s\S]|`(?!``))*)```/;var V=/^(?:'''|"""|'|")/;var X=/^(?:[^\\']|\\[\s\S])*/;var G=/^(?:[^\\"#]|\\[\s\S]|\#(?!\{))*/;var aa=/^(?:[^\\']|\\[\s\S]|'(?!''))*/; -var U=/^(?:[^\\"#]|\\[\s\S]|"(?!"")|\#(?!\{))*/;var W=/((?:\\\\)+)|\\[^\S\n]*\n\s*/g;var D=/\s*\n\s*/g;var A=/\n+([^\n\S]*)(?=\S)/g;var fc=/^\/(?!\/)((?:[^[\/\n\\]|\\[^\n]|\[(?:\\[^\n]|[^\]\n\\])*\])*)(\/)?/;var E=/^\w*/;var ba=/^(?!.*(.).*\1)[imguy]*$/;var ca=/^(?:[^\\\/#]|\\[\s\S]|\/(?!\/\/)|\#(?!\{))*/;var C=/((?:\\\\)+)|\\(\s)|\s+(?:#.*)?/g;var T=/^(\/|\/{3}\s*)(\*)/;var v=/^\/=?\s/;var Y=/\*\//;var S=/^\s*(?:,|\??\.(?![.\d])|::)/;var 
M=/((?:^|[^\\])(?:\\\\)*)\\(?:(0[0-7]|[1-7])|(x(?![\da-fA-F]{2}).{0,2})|(u\{(?![\da-fA-F]{1,}\})[^}]*\}?)|(u(?!\{|[\da-fA-F]{4}).{0,4}))/; -var va=/((?:^|[^\\])(?:\\\\)*)\\(?:(0[0-7])|(x(?![\da-fA-F]{2}).{0,2})|(u\{(?![\da-fA-F]{1,}\})[^}]*\}?)|(u(?!\{|[\da-fA-F]{4}).{0,4}))/;var sa=/(\\\\)|\\u\{([\da-fA-F]+)\}/g;var za=/^[^\n\S]*\n/;var ma=/\n[^\n\S]*$/;var Z=/\s+$/;var fa="-\x3d +\x3d /\x3d *\x3d %\x3d ||\x3d \x26\x26\x3d ?\x3d \x3c\x3c\x3d \x3e\x3e\x3d \x3e\x3e\x3e\x3d \x26\x3d ^\x3d |\x3d **\x3d //\x3d %%\x3d".split(" ");var ia=["NEW","TYPEOF","DELETE","DO"];var ga=["!","~"];var ja=["\x3c\x3c","\x3e\x3e","\x3e\x3e\x3e"];var la="\x3d\x3d !\x3d \x3c \x3e \x3c\x3d \x3e\x3d".split(" "); -var oa=["*","/","%","//","%%"];var pa=["IN","OF","INSTANCEOF"];var ha="IDENTIFIER PROPERTY ) ] ? @ THIS SUPER".split(" ");var ka=ha.concat("NUMBER INFINITY NAN STRING STRING_END REGEX REGEX_END BOOL NULL UNDEFINED } ::".split(" "));var na=ka.concat(["++","--"]);var ra=["INDENT","OUTDENT","TERMINATOR"];var da=[")","}","]"]}).call(this);return f}();u["./parser"]=function(){var f={},qa={exports:f},q=function(){function f(){this.yy={}}var a=function(a,p,t,d){t=t||{};for(d=a.length;d--;t[a[d]]=p);return t}, -b=[1,22],u=[1,25],g=[1,83],h=[1,79],r=[1,84],n=[1,85],B=[1,81],H=[1,82],I=[1,56],F=[1,58],Q=[1,59],x=[1,60],J=[1,61],O=[1,62],R=[1,49],z=[1,50],l=[1,32],c=[1,68],w=[1,69],m=[1,78],k=[1,47],K=[1,51],P=[1,52],L=[1,67],V=[1,65],X=[1,66],G=[1,64],aa=[1,42],U=[1,48],W=[1,63],D=[1,73],A=[1,74],q=[1,75],E=[1,76],ba=[1,46],ca=[1,72],C=[1,34],T=[1,35],v=[1,36],Y=[1,37],S=[1,38],M=[1,39],qa=[1,86],sa=[1,6,32,42,131],za=[1,101],ma=[1,89],Z=[1,88],fa=[1,87],ia=[1,90],ga=[1,91],ja=[1,92],la=[1,93],oa=[1,94],pa= 
-[1,95],ha=[1,96],ka=[1,97],na=[1,98],ra=[1,99],da=[1,100],va=[1,104],N=[1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],xa=[2,166],ta=[1,110],Na=[1,111],Fa=[1,112],Ga=[1,113],Ca=[1,115],Pa=[1,116],Ia=[1,109],Ea=[1,6,32,42,131,133,135,139,156],Va=[2,27],ea=[1,123],Ya=[1,121],Ba=[1,6,31,32,40,41,42,65,70,73,82,83,84,85,87,89,90,94,113,114,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172, -173,174],Ha=[2,94],t=[1,6,31,32,42,46,65,70,73,82,83,84,85,87,89,90,94,113,114,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],p=[2,73],d=[1,128],wa=[1,133],e=[1,134],Da=[1,136],Ta=[1,6,31,32,40,41,42,55,65,70,73,82,83,84,85,87,89,90,94,113,114,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],ua=[2,91],Eb=[1,6,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168, -169,170,171,172,173,174],Za=[2,63],Fb=[1,166],$a=[1,178],Ua=[1,180],Gb=[1,175],Oa=[1,182],sb=[1,184],La=[1,6,31,32,40,41,42,55,65,70,73,82,83,84,85,87,89,90,94,96,113,114,115,120,122,131,133,134,135,139,140,156,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175],Hb=[2,110],Ib=[1,6,31,32,40,41,42,58,65,70,73,82,83,84,85,87,89,90,94,113,114,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],Jb=[1,6,31,32,40,41,42,46,58,65,70,73,82,83,84, 
-85,87,89,90,94,113,114,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],Kb=[40,41,114],Lb=[1,241],tb=[1,240],Ma=[1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156],Ja=[2,71],Mb=[1,250],Sa=[6,31,32,65,70],fb=[6,31,32,55,65,70,73],ab=[1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,159,160,164,166,167,168,169,170,171,172,173,174],Nb=[40,41,82,83,84,85,87,90,113,114],gb=[1,269],bb=[2,62],hb=[1,279],Wa=[1,281],ub=[1, -286],cb=[1,288],Ob=[2,187],vb=[1,6,31,32,40,41,42,55,65,70,73,82,83,84,85,87,89,90,94,113,114,115,120,122,131,133,134,135,139,140,146,147,148,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],ib=[1,297],Qa=[6,31,32,70,115,120],Pb=[1,6,31,32,40,41,42,55,58,65,70,73,82,83,84,85,87,89,90,94,96,113,114,115,120,122,131,133,134,135,139,140,146,147,148,156,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175],Qb=[1,6,31,32,42,65,70,73,89,94,115,120,122,131,140,156],Xa=[1,6,31,32, -42,65,70,73,89,94,115,120,122,131,134,140,156],jb=[146,147,148],kb=[70,146,147,148],lb=[6,31,94],Rb=[1,311],Aa=[6,31,32,70,94],Sb=[6,31,32,58,70,94],wb=[6,31,32,55,58,70,94],Tb=[1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,159,160,166,167,168,169,170,171,172,173,174],Ub=[12,28,34,38,40,41,44,45,48,49,50,51,52,53,61,62,63,67,68,89,92,95,97,105,112,117,118,119,125,129,130,133,135,137,139,149,155,157,158,159,160,161,162],Vb=[2,176],Ra=[6,31,32],db=[2,72],Wb=[1,323],Xb=[1,324], 
-Yb=[1,6,31,32,42,65,70,73,89,94,115,120,122,127,128,131,133,134,135,139,140,151,153,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],mb=[32,151,153],Zb=[1,6,32,42,65,70,73,89,94,115,120,122,131,134,140,156],nb=[1,350],xb=[1,356],yb=[1,6,32,42,131,156],eb=[2,86],ob=[1,367],pb=[1,368],$b=[1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,151,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],zb=[1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,135,139,140,156],ac= -[1,381],bc=[1,382],Ab=[6,31,32,94],cc=[6,31,32,70],Bb=[1,6,31,32,42,65,70,73,89,94,115,120,122,127,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],dc=[31,70],qb=[1,408],rb=[1,409],Cb=[1,415],Db=[1,416],ec={trace:function(){},yy:{},symbols_:{error:2,Root:3,Body:4,Line:5,TERMINATOR:6,Expression:7,Statement:8,YieldReturn:9,Return:10,Comment:11,STATEMENT:12,Import:13,Export:14,Value:15,Invocation:16,Code:17,Operation:18,Assign:19,If:20,Try:21,While:22,For:23,Switch:24, -Class:25,Throw:26,Yield:27,YIELD:28,FROM:29,Block:30,INDENT:31,OUTDENT:32,Identifier:33,IDENTIFIER:34,Property:35,PROPERTY:36,AlphaNumeric:37,NUMBER:38,String:39,STRING:40,STRING_START:41,STRING_END:42,Regex:43,REGEX:44,REGEX_START:45,REGEX_END:46,Literal:47,JS:48,UNDEFINED:49,NULL:50,BOOL:51,INFINITY:52,NAN:53,Assignable:54,"\x3d":55,AssignObj:56,ObjAssignable:57,":":58,SimpleObjAssignable:59,ThisProperty:60,RETURN:61,HERECOMMENT:62,PARAM_START:63,ParamList:64,PARAM_END:65,FuncGlyph:66,"-\x3e":67, 
-"\x3d\x3e":68,OptComma:69,",":70,Param:71,ParamVar:72,"...":73,Array:74,Object:75,Splat:76,SimpleAssignable:77,Accessor:78,Parenthetical:79,Range:80,This:81,".":82,"?.":83,"::":84,"?::":85,Index:86,INDEX_START:87,IndexValue:88,INDEX_END:89,INDEX_SOAK:90,Slice:91,"{":92,AssignList:93,"}":94,CLASS:95,EXTENDS:96,IMPORT:97,ImportDefaultSpecifier:98,ImportNamespaceSpecifier:99,ImportSpecifierList:100,ImportSpecifier:101,AS:102,DEFAULT:103,IMPORT_ALL:104,EXPORT:105,ExportSpecifierList:106,EXPORT_ALL:107, -ExportSpecifier:108,OptFuncExist:109,Arguments:110,Super:111,SUPER:112,FUNC_EXIST:113,CALL_START:114,CALL_END:115,ArgList:116,THIS:117,"@":118,"[":119,"]":120,RangeDots:121,"..":122,Arg:123,SimpleArgs:124,TRY:125,Catch:126,FINALLY:127,CATCH:128,THROW:129,"(":130,")":131,WhileSource:132,WHILE:133,WHEN:134,UNTIL:135,Loop:136,LOOP:137,ForBody:138,FOR:139,BY:140,ForStart:141,ForSource:142,ForVariables:143,OWN:144,ForValue:145,FORIN:146,FOROF:147,FORFROM:148,SWITCH:149,Whens:150,ELSE:151,When:152,LEADING_WHEN:153, -IfBlock:154,IF:155,POST_IF:156,UNARY:157,UNARY_MATH:158,"-":159,"+":160,"--":161,"++":162,"?":163,MATH:164,"**":165,SHIFT:166,COMPARE:167,"\x26":168,"^":169,"|":170,"\x26\x26":171,"||":172,"BIN?":173,RELATION:174,COMPOUND_ASSIGN:175,$accept:0,$end:1},terminals_:{2:"error",6:"TERMINATOR",12:"STATEMENT",28:"YIELD",29:"FROM",31:"INDENT",32:"OUTDENT",34:"IDENTIFIER",36:"PROPERTY",38:"NUMBER",40:"STRING",41:"STRING_START",42:"STRING_END",44:"REGEX",45:"REGEX_START",46:"REGEX_END",48:"JS",49:"UNDEFINED", 
-50:"NULL",51:"BOOL",52:"INFINITY",53:"NAN",55:"\x3d",58:":",61:"RETURN",62:"HERECOMMENT",63:"PARAM_START",65:"PARAM_END",67:"-\x3e",68:"\x3d\x3e",70:",",73:"...",82:".",83:"?.",84:"::",85:"?::",87:"INDEX_START",89:"INDEX_END",90:"INDEX_SOAK",92:"{",94:"}",95:"CLASS",96:"EXTENDS",97:"IMPORT",102:"AS",103:"DEFAULT",104:"IMPORT_ALL",105:"EXPORT",107:"EXPORT_ALL",112:"SUPER",113:"FUNC_EXIST",114:"CALL_START",115:"CALL_END",117:"THIS",118:"@",119:"[",120:"]",122:"..",125:"TRY",127:"FINALLY",128:"CATCH", -129:"THROW",130:"(",131:")",133:"WHILE",134:"WHEN",135:"UNTIL",137:"LOOP",139:"FOR",140:"BY",144:"OWN",146:"FORIN",147:"FOROF",148:"FORFROM",149:"SWITCH",151:"ELSE",153:"LEADING_WHEN",155:"IF",156:"POST_IF",157:"UNARY",158:"UNARY_MATH",159:"-",160:"+",161:"--",162:"++",163:"?",164:"MATH",165:"**",166:"SHIFT",167:"COMPARE",168:"\x26",169:"^",170:"|",171:"\x26\x26",172:"||",173:"BIN?",174:"RELATION",175:"COMPOUND_ASSIGN"},productions_:[0,[3,0],[3,1],[4,1],[4,3],[4,2],[5,1],[5,1],[5,1],[8,1],[8,1],[8, -1],[8,1],[8,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[27,1],[27,2],[27,3],[30,2],[30,3],[33,1],[35,1],[37,1],[37,1],[39,1],[39,3],[43,1],[43,3],[47,1],[47,1],[47,1],[47,1],[47,1],[47,1],[47,1],[47,1],[19,3],[19,4],[19,5],[56,1],[56,3],[56,5],[56,3],[56,5],[56,1],[59,1],[59,1],[59,1],[57,1],[57,1],[10,2],[10,1],[9,3],[9,2],[11,1],[17,5],[17,2],[66,1],[66,1],[69,0],[69,1],[64,0],[64,1],[64,3],[64,4],[64,6],[71,1],[71,2],[71,3],[71,1],[72,1],[72,1],[72,1],[72, 
-1],[76,2],[77,1],[77,2],[77,2],[77,1],[54,1],[54,1],[54,1],[15,1],[15,1],[15,1],[15,1],[15,1],[78,2],[78,2],[78,2],[78,2],[78,1],[78,1],[86,3],[86,2],[88,1],[88,1],[75,4],[93,0],[93,1],[93,3],[93,4],[93,6],[25,1],[25,2],[25,3],[25,4],[25,2],[25,3],[25,4],[25,5],[13,2],[13,4],[13,4],[13,5],[13,7],[13,6],[13,9],[100,1],[100,3],[100,4],[100,4],[100,6],[101,1],[101,3],[101,1],[101,3],[98,1],[99,3],[14,3],[14,5],[14,2],[14,4],[14,5],[14,6],[14,3],[14,4],[14,7],[106,1],[106,3],[106,4],[106,4],[106,6],[108, -1],[108,3],[108,3],[108,1],[108,3],[16,3],[16,3],[16,3],[16,1],[111,1],[111,2],[109,0],[109,1],[110,2],[110,4],[81,1],[81,1],[60,2],[74,2],[74,4],[121,1],[121,1],[80,5],[91,3],[91,2],[91,2],[91,1],[116,1],[116,3],[116,4],[116,4],[116,6],[123,1],[123,1],[123,1],[124,1],[124,3],[21,2],[21,3],[21,4],[21,5],[126,3],[126,3],[126,2],[26,2],[79,3],[79,5],[132,2],[132,4],[132,2],[132,4],[22,2],[22,2],[22,2],[22,1],[136,2],[136,2],[23,2],[23,2],[23,2],[138,2],[138,4],[138,2],[141,2],[141,3],[145,1],[145,1], -[145,1],[145,1],[143,1],[143,3],[142,2],[142,2],[142,4],[142,4],[142,4],[142,6],[142,6],[142,2],[142,4],[24,5],[24,7],[24,4],[24,6],[150,1],[150,2],[152,3],[152,4],[154,3],[154,5],[20,1],[20,3],[20,3],[20,3],[18,2],[18,2],[18,2],[18,2],[18,2],[18,2],[18,2],[18,2],[18,2],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,5],[18,4],[18,3]],performAction:function(a,p,t,d,wa,b,e){a=b.length-1;switch(wa){case 1:return this.$=d.addLocationDataFn(e[a],e[a])(new d.Block); -case 2:return this.$=b[a];case 3:this.$=d.addLocationDataFn(e[a],e[a])(d.Block.wrap([b[a]]));break;case 4:this.$=d.addLocationDataFn(e[a-2],e[a])(b[a-2].push(b[a]));break;case 5:this.$=b[a-1];break;case 6:case 7:case 8:case 9:case 10:case 12:case 13:case 14:case 15:case 16:case 17:case 18:case 19:case 20:case 21:case 22:case 23:case 24:case 25:case 26:case 35:case 40:case 42:case 56:case 57:case 58:case 59:case 60:case 61:case 71:case 72:case 82:case 83:case 
84:case 85:case 90:case 91:case 94:case 98:case 104:case 163:case 187:case 188:case 190:case 220:case 221:case 239:case 245:this.$= -b[a];break;case 11:this.$=d.addLocationDataFn(e[a],e[a])(new d.StatementLiteral(b[a]));break;case 27:this.$=d.addLocationDataFn(e[a],e[a])(new d.Op(b[a],new d.Value(new d.Literal(""))));break;case 28:case 249:case 250:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Op(b[a-1],b[a]));break;case 29:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Op(b[a-2].concat(b[a-1]),b[a]));break;case 30:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Block);break;case 31:case 105:this.$=d.addLocationDataFn(e[a-2],e[a])(b[a- -1]);break;case 32:this.$=d.addLocationDataFn(e[a],e[a])(new d.IdentifierLiteral(b[a]));break;case 33:this.$=d.addLocationDataFn(e[a],e[a])(new d.PropertyName(b[a]));break;case 34:this.$=d.addLocationDataFn(e[a],e[a])(new d.NumberLiteral(b[a]));break;case 36:this.$=d.addLocationDataFn(e[a],e[a])(new d.StringLiteral(b[a]));break;case 37:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.StringWithInterpolations(b[a-1]));break;case 38:this.$=d.addLocationDataFn(e[a],e[a])(new d.RegexLiteral(b[a]));break; -case 39:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.RegexWithInterpolations(b[a-1].args));break;case 41:this.$=d.addLocationDataFn(e[a],e[a])(new d.PassthroughLiteral(b[a]));break;case 43:this.$=d.addLocationDataFn(e[a],e[a])(new d.UndefinedLiteral);break;case 44:this.$=d.addLocationDataFn(e[a],e[a])(new d.NullLiteral);break;case 45:this.$=d.addLocationDataFn(e[a],e[a])(new d.BooleanLiteral(b[a]));break;case 46:this.$=d.addLocationDataFn(e[a],e[a])(new d.InfinityLiteral(b[a]));break;case 47:this.$= -d.addLocationDataFn(e[a],e[a])(new d.NaNLiteral);break;case 48:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Assign(b[a-2],b[a]));break;case 49:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.Assign(b[a-3],b[a]));break;case 50:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Assign(b[a-4],b[a-1]));break;case 51:case 87:case 92:case 93:case 95:case 
96:case 97:case 222:case 223:this.$=d.addLocationDataFn(e[a],e[a])(new d.Value(b[a]));break;case 52:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Assign(d.addLocationDataFn(e[a- -2])(new d.Value(b[a-2])),b[a],"object",{operatorToken:d.addLocationDataFn(e[a-1])(new d.Literal(b[a-1]))}));break;case 53:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Assign(d.addLocationDataFn(e[a-4])(new d.Value(b[a-4])),b[a-1],"object",{operatorToken:d.addLocationDataFn(e[a-3])(new d.Literal(b[a-3]))}));break;case 54:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Assign(d.addLocationDataFn(e[a-2])(new d.Value(b[a-2])),b[a],null,{operatorToken:d.addLocationDataFn(e[a-1])(new d.Literal(b[a-1]))})); -break;case 55:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Assign(d.addLocationDataFn(e[a-4])(new d.Value(b[a-4])),b[a-1],null,{operatorToken:d.addLocationDataFn(e[a-3])(new d.Literal(b[a-3]))}));break;case 62:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Return(b[a]));break;case 63:this.$=d.addLocationDataFn(e[a],e[a])(new d.Return);break;case 64:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.YieldReturn(b[a]));break;case 65:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.YieldReturn);break;case 66:this.$= -d.addLocationDataFn(e[a],e[a])(new d.Comment(b[a]));break;case 67:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Code(b[a-3],b[a],b[a-1]));break;case 68:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Code([],b[a],b[a-1]));break;case 69:this.$=d.addLocationDataFn(e[a],e[a])("func");break;case 70:this.$=d.addLocationDataFn(e[a],e[a])("boundfunc");break;case 73:case 110:this.$=d.addLocationDataFn(e[a],e[a])([]);break;case 74:case 111:case 130:case 150:case 182:case 224:this.$=d.addLocationDataFn(e[a], -e[a])([b[a]]);break;case 75:case 112:case 131:case 151:case 183:this.$=d.addLocationDataFn(e[a-2],e[a])(b[a-2].concat(b[a]));break;case 76:case 113:case 132:case 152:case 184:this.$=d.addLocationDataFn(e[a-3],e[a])(b[a-3].concat(b[a]));break;case 77:case 114:case 134:case 154:case 
186:this.$=d.addLocationDataFn(e[a-5],e[a])(b[a-5].concat(b[a-2]));break;case 78:this.$=d.addLocationDataFn(e[a],e[a])(new d.Param(b[a]));break;case 79:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Param(b[a-1],null,!0)); -break;case 80:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Param(b[a-2],b[a]));break;case 81:case 189:this.$=d.addLocationDataFn(e[a],e[a])(new d.Expansion);break;case 86:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Splat(b[a-1]));break;case 88:this.$=d.addLocationDataFn(e[a-1],e[a])(b[a-1].add(b[a]));break;case 89:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Value(b[a-1],[].concat(b[a])));break;case 99:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Access(b[a]));break;case 100:this.$=d.addLocationDataFn(e[a- -1],e[a])(new d.Access(b[a],"soak"));break;case 101:this.$=d.addLocationDataFn(e[a-1],e[a])([d.addLocationDataFn(e[a-1])(new d.Access(new d.PropertyName("prototype"))),d.addLocationDataFn(e[a])(new d.Access(b[a]))]);break;case 102:this.$=d.addLocationDataFn(e[a-1],e[a])([d.addLocationDataFn(e[a-1])(new d.Access(new d.PropertyName("prototype"),"soak")),d.addLocationDataFn(e[a])(new d.Access(b[a]))]);break;case 103:this.$=d.addLocationDataFn(e[a],e[a])(new d.Access(new d.PropertyName("prototype"))); -break;case 106:this.$=d.addLocationDataFn(e[a-1],e[a])(d.extend(b[a],{soak:!0}));break;case 107:this.$=d.addLocationDataFn(e[a],e[a])(new d.Index(b[a]));break;case 108:this.$=d.addLocationDataFn(e[a],e[a])(new d.Slice(b[a]));break;case 109:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.Obj(b[a-2],b[a-3].generated));break;case 115:this.$=d.addLocationDataFn(e[a],e[a])(new d.Class);break;case 116:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Class(null,null,b[a]));break;case 117:this.$=d.addLocationDataFn(e[a- -2],e[a])(new d.Class(null,b[a]));break;case 118:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.Class(null,b[a-1],b[a]));break;case 119:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Class(b[a]));break;case 
120:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Class(b[a-1],null,b[a]));break;case 121:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.Class(b[a-2],b[a]));break;case 122:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Class(b[a-3],b[a-1],b[a]));break;case 123:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.ImportDeclaration(null, -b[a]));break;case 124:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.ImportDeclaration(new d.ImportClause(b[a-2],null),b[a]));break;case 125:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.ImportDeclaration(new d.ImportClause(null,b[a-2]),b[a]));break;case 126:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.ImportDeclaration(new d.ImportClause(null,new d.ImportSpecifierList([])),b[a]));break;case 127:this.$=d.addLocationDataFn(e[a-6],e[a])(new d.ImportDeclaration(new d.ImportClause(null,new d.ImportSpecifierList(b[a- -4])),b[a]));break;case 128:this.$=d.addLocationDataFn(e[a-5],e[a])(new d.ImportDeclaration(new d.ImportClause(b[a-4],b[a-2]),b[a]));break;case 129:this.$=d.addLocationDataFn(e[a-8],e[a])(new d.ImportDeclaration(new d.ImportClause(b[a-7],new d.ImportSpecifierList(b[a-4])),b[a]));break;case 133:case 153:case 169:case 185:this.$=d.addLocationDataFn(e[a-3],e[a])(b[a-2]);break;case 135:this.$=d.addLocationDataFn(e[a],e[a])(new d.ImportSpecifier(b[a]));break;case 136:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.ImportSpecifier(b[a- -2],b[a]));break;case 137:this.$=d.addLocationDataFn(e[a],e[a])(new d.ImportSpecifier(new d.Literal(b[a])));break;case 138:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.ImportSpecifier(new d.Literal(b[a-2]),b[a]));break;case 139:this.$=d.addLocationDataFn(e[a],e[a])(new d.ImportDefaultSpecifier(b[a]));break;case 140:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.ImportNamespaceSpecifier(new d.Literal(b[a-2]),b[a]));break;case 141:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.ExportNamedDeclaration(new d.ExportSpecifierList([]))); -break;case 142:this.$=d.addLocationDataFn(e[a-4],e[a])(new 
d.ExportNamedDeclaration(new d.ExportSpecifierList(b[a-2])));break;case 143:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.ExportNamedDeclaration(b[a]));break;case 144:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.ExportNamedDeclaration(new d.Assign(b[a-2],b[a],null,{moduleDeclaration:"export"})));break;case 145:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.ExportNamedDeclaration(new d.Assign(b[a-3],b[a],null,{moduleDeclaration:"export"}))); -break;case 146:this.$=d.addLocationDataFn(e[a-5],e[a])(new d.ExportNamedDeclaration(new d.Assign(b[a-4],b[a-1],null,{moduleDeclaration:"export"})));break;case 147:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.ExportDefaultDeclaration(b[a]));break;case 148:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.ExportAllDeclaration(new d.Literal(b[a-2]),b[a]));break;case 149:this.$=d.addLocationDataFn(e[a-6],e[a])(new d.ExportNamedDeclaration(new d.ExportSpecifierList(b[a-4]),b[a]));break;case 155:this.$=d.addLocationDataFn(e[a], -e[a])(new d.ExportSpecifier(b[a]));break;case 156:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.ExportSpecifier(b[a-2],b[a]));break;case 157:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.ExportSpecifier(b[a-2],new d.Literal(b[a])));break;case 158:this.$=d.addLocationDataFn(e[a],e[a])(new d.ExportSpecifier(new d.Literal(b[a])));break;case 159:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.ExportSpecifier(new d.Literal(b[a-2]),b[a]));break;case 160:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.TaggedTemplateCall(b[a- -2],b[a],b[a-1]));break;case 161:case 162:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Call(b[a-2],b[a],b[a-1]));break;case 164:this.$=d.addLocationDataFn(e[a],e[a])(new d.SuperCall);break;case 165:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.SuperCall(b[a]));break;case 166:this.$=d.addLocationDataFn(e[a],e[a])(!1);break;case 167:this.$=d.addLocationDataFn(e[a],e[a])(!0);break;case 168:this.$=d.addLocationDataFn(e[a-1],e[a])([]);break;case 170:case 171:this.$=d.addLocationDataFn(e[a],e[a])(new 
d.Value(new d.ThisLiteral)); -break;case 172:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Value(d.addLocationDataFn(e[a-1])(new d.ThisLiteral),[d.addLocationDataFn(e[a])(new d.Access(b[a]))],"this"));break;case 173:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Arr([]));break;case 174:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.Arr(b[a-2]));break;case 175:this.$=d.addLocationDataFn(e[a],e[a])("inclusive");break;case 176:this.$=d.addLocationDataFn(e[a],e[a])("exclusive");break;case 177:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Range(b[a- -3],b[a-1],b[a-2]));break;case 178:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Range(b[a-2],b[a],b[a-1]));break;case 179:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Range(b[a-1],null,b[a]));break;case 180:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Range(null,b[a],b[a-1]));break;case 181:this.$=d.addLocationDataFn(e[a],e[a])(new d.Range(null,null,b[a]));break;case 191:this.$=d.addLocationDataFn(e[a-2],e[a])([].concat(b[a-2],b[a]));break;case 192:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Try(b[a])); -break;case 193:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Try(b[a-1],b[a][0],b[a][1]));break;case 194:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.Try(b[a-2],null,null,b[a]));break;case 195:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Try(b[a-3],b[a-2][0],b[a-2][1],b[a]));break;case 196:this.$=d.addLocationDataFn(e[a-2],e[a])([b[a-1],b[a]]);break;case 197:this.$=d.addLocationDataFn(e[a-2],e[a])([d.addLocationDataFn(e[a-1])(new d.Value(b[a-1])),b[a]]);break;case 198:this.$=d.addLocationDataFn(e[a- -1],e[a])([null,b[a]]);break;case 199:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Throw(b[a]));break;case 200:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Parens(b[a-1]));break;case 201:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Parens(b[a-2]));break;case 202:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.While(b[a]));break;case 203:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.While(b[a-2],{guard:b[a]}));break;case 
204:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.While(b[a],{invert:!0}));break; -case 205:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.While(b[a-2],{invert:!0,guard:b[a]}));break;case 206:this.$=d.addLocationDataFn(e[a-1],e[a])(b[a-1].addBody(b[a]));break;case 207:case 208:this.$=d.addLocationDataFn(e[a-1],e[a])(b[a].addBody(d.addLocationDataFn(e[a-1])(d.Block.wrap([b[a-1]]))));break;case 209:this.$=d.addLocationDataFn(e[a],e[a])(b[a]);break;case 210:this.$=d.addLocationDataFn(e[a-1],e[a])((new d.While(d.addLocationDataFn(e[a-1])(new d.BooleanLiteral("true")))).addBody(b[a])); -break;case 211:this.$=d.addLocationDataFn(e[a-1],e[a])((new d.While(d.addLocationDataFn(e[a-1])(new d.BooleanLiteral("true")))).addBody(d.addLocationDataFn(e[a])(d.Block.wrap([b[a]]))));break;case 212:case 213:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.For(b[a-1],b[a]));break;case 214:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.For(b[a],b[a-1]));break;case 215:this.$=d.addLocationDataFn(e[a-1],e[a])({source:d.addLocationDataFn(e[a])(new d.Value(b[a]))});break;case 216:this.$=d.addLocationDataFn(e[a- -3],e[a])({source:d.addLocationDataFn(e[a-2])(new d.Value(b[a-2])),step:b[a]});break;case 217:d=d.addLocationDataFn(e[a-1],e[a]);b[a].own=b[a-1].own;b[a].ownTag=b[a-1].ownTag;b[a].name=b[a-1][0];b[a].index=b[a-1][1];this.$=d(b[a]);break;case 218:this.$=d.addLocationDataFn(e[a-1],e[a])(b[a]);break;case 219:wa=d.addLocationDataFn(e[a-2],e[a]);b[a].own=!0;b[a].ownTag=d.addLocationDataFn(e[a-1])(new d.Literal(b[a-1]));this.$=wa(b[a]);break;case 225:this.$=d.addLocationDataFn(e[a-2],e[a])([b[a-2],b[a]]); -break;case 226:this.$=d.addLocationDataFn(e[a-1],e[a])({source:b[a]});break;case 227:this.$=d.addLocationDataFn(e[a-1],e[a])({source:b[a],object:!0});break;case 228:this.$=d.addLocationDataFn(e[a-3],e[a])({source:b[a-2],guard:b[a]});break;case 229:this.$=d.addLocationDataFn(e[a-3],e[a])({source:b[a-2],guard:b[a],object:!0});break;case 
230:this.$=d.addLocationDataFn(e[a-3],e[a])({source:b[a-2],step:b[a]});break;case 231:this.$=d.addLocationDataFn(e[a-5],e[a])({source:b[a-4],guard:b[a-2],step:b[a]}); -break;case 232:this.$=d.addLocationDataFn(e[a-5],e[a])({source:b[a-4],step:b[a-2],guard:b[a]});break;case 233:this.$=d.addLocationDataFn(e[a-1],e[a])({source:b[a],from:!0});break;case 234:this.$=d.addLocationDataFn(e[a-3],e[a])({source:b[a-2],guard:b[a],from:!0});break;case 235:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Switch(b[a-3],b[a-1]));break;case 236:this.$=d.addLocationDataFn(e[a-6],e[a])(new d.Switch(b[a-5],b[a-3],b[a-1]));break;case 237:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.Switch(null, -b[a-1]));break;case 238:this.$=d.addLocationDataFn(e[a-5],e[a])(new d.Switch(null,b[a-3],b[a-1]));break;case 240:this.$=d.addLocationDataFn(e[a-1],e[a])(b[a-1].concat(b[a]));break;case 241:this.$=d.addLocationDataFn(e[a-2],e[a])([[b[a-1],b[a]]]);break;case 242:this.$=d.addLocationDataFn(e[a-3],e[a])([[b[a-2],b[a-1]]]);break;case 243:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.If(b[a-1],b[a],{type:b[a-2]}));break;case 244:this.$=d.addLocationDataFn(e[a-4],e[a])(b[a-4].addElse(d.addLocationDataFn(e[a- -2],e[a])(new d.If(b[a-1],b[a],{type:b[a-2]}))));break;case 246:this.$=d.addLocationDataFn(e[a-2],e[a])(b[a-2].addElse(b[a]));break;case 247:case 248:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.If(b[a],d.addLocationDataFn(e[a-2])(d.Block.wrap([b[a-2]])),{type:b[a-1],statement:!0}));break;case 251:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Op("-",b[a]));break;case 252:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Op("+",b[a]));break;case 253:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Op("--", -b[a]));break;case 254:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Op("++",b[a]));break;case 255:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Op("--",b[a-1],null,!0));break;case 256:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Op("++",b[a-1],null,!0));break;case 
257:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Existence(b[a-1]));break;case 258:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Op("+",b[a-2],b[a]));break;case 259:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Op("-",b[a-2],b[a]));break; -case 260:case 261:case 262:case 263:case 264:case 265:case 266:case 267:case 268:case 269:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Op(b[a-1],b[a-2],b[a]));break;case 270:e=d.addLocationDataFn(e[a-2],e[a]);b="!"===b[a-1].charAt(0)?(new d.Op(b[a-1].slice(1),b[a-2],b[a])).invert():new d.Op(b[a-1],b[a-2],b[a]);this.$=e(b);break;case 271:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Assign(b[a-2],b[a],b[a-1]));break;case 272:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Assign(b[a-4],b[a-1],b[a-3])); -break;case 273:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.Assign(b[a-3],b[a],b[a-2]));break;case 274:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Extends(b[a-2],b[a]))}},table:[{1:[2,1],3:1,4:2,5:3,7:4,8:5,9:6,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:u,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k, -97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{1:[3]},{1:[2,2],6:qa},a(sa,[2,3]),a(sa,[2,6],{141:77,132:102,138:103,133:D,135:A,139:E,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(sa,[2,7],{141:77,132:105,138:106,133:D,135:A,139:E,156:va}),a(sa,[2,8]),a(N,[2,14],{109:107,78:108,86:114,40:xa,41:xa,114:xa,82:ta,83:Na, 
-84:Fa,85:Ga,87:Ca,90:Pa,113:Ia}),a(N,[2,15],{86:114,109:117,78:118,82:ta,83:Na,84:Fa,85:Ga,87:Ca,90:Pa,113:Ia,114:xa}),a(N,[2,16]),a(N,[2,17]),a(N,[2,18]),a(N,[2,19]),a(N,[2,20]),a(N,[2,21]),a(N,[2,22]),a(N,[2,23]),a(N,[2,24]),a(N,[2,25]),a(N,[2,26]),a(Ea,[2,9]),a(Ea,[2,10]),a(Ea,[2,11]),a(Ea,[2,12]),a(Ea,[2,13]),a([1,6,32,42,131,133,135,139,156,163,164,165,166,167,168,169,170,171,172,173,174],Va,{15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,10:20,11:21,13:23,14:24,54:26, -47:27,79:28,80:29,81:30,111:31,66:33,77:40,154:41,132:43,136:44,138:45,74:53,75:54,37:55,43:57,33:70,60:71,141:77,39:80,7:120,8:122,12:b,28:ea,29:Ya,34:g,38:h,40:r,41:n,44:B,45:H,48:I,49:F,50:Q,51:x,52:J,53:O,61:[1,119],62:z,63:l,67:c,68:w,92:m,95:k,97:K,105:P,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,137:q,149:ba,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M}),a(Ba,Ha,{55:[1,124]}),a(Ba,[2,95]),a(Ba,[2,96]),a(Ba,[2,97]),a(Ba,[2,98]),a(t,[2,163]),a([6,31,65,70],p,{64:125,71:126,72:127,33:129,60:130, -74:131,75:132,34:g,73:d,92:m,118:wa,119:e}),{30:135,31:Da},{7:137,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C, 
-158:T,159:v,160:Y,161:S,162:M},{7:138,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M}, -{7:139,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:140,8:122,10:20,11:21,12:b, -13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{15:142,16:143,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57, 
-44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:144,60:71,74:53,75:54,77:141,79:28,80:29,81:30,92:m,111:31,112:L,117:V,118:X,119:G,130:W},{15:142,16:143,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:144,60:71,74:53,75:54,77:145,79:28,80:29,81:30,92:m,111:31,112:L,117:V,118:X,119:G,130:W},a(Ta,ua,{96:[1,149],161:[1,146],162:[1,147],175:[1,148]}),a(N,[2,245],{151:[1,150]}),{30:151,31:Da},{30:152,31:Da},a(N,[2,209]),{30:153,31:Da},{7:154,8:122,10:20,11:21, -12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:[1,155],33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Eb,[2,115],{47:27,79:28,80:29,81:30,111:31, -74:53,75:54,37:55,43:57,33:70,60:71,39:80,15:142,16:143,54:144,30:156,77:158,31:Da,34:g,38:h,40:r,41:n,44:B,45:H,48:I,49:F,50:Q,51:x,52:J,53:O,92:m,96:[1,157],112:L,117:V,118:X,119:G,130:W}),{7:159,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P, 
-111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Ea,Za,{15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,10:20,11:21,13:23,14:24,54:26,47:27,79:28,80:29,81:30,111:31,66:33,77:40,154:41,132:43,136:44,138:45,74:53,75:54,37:55,43:57,33:70,60:71,141:77,39:80,8:122,7:160,12:b,28:ea,34:g,38:h,40:r,41:n,44:B,45:H,48:I,49:F,50:Q,51:x,52:J,53:O,61:R,62:z,63:l,67:c,68:w, -92:m,95:k,97:K,105:P,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,137:q,149:ba,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M}),a([1,6,31,32,42,70,94,131,133,135,139,156],[2,66]),{33:165,34:g,39:161,40:r,41:n,92:[1,164],98:162,99:163,104:Fb},{25:168,33:169,34:g,92:[1,167],95:k,103:[1,170],107:[1,171]},a(Ta,[2,92]),a(Ta,[2,93]),a(Ba,[2,40]),a(Ba,[2,41]),a(Ba,[2,42]),a(Ba,[2,43]),a(Ba,[2,44]),a(Ba,[2,45]),a(Ba,[2,46]),a(Ba,[2,47]),{4:172,5:3,7:4,8:5,9:6,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11, -20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:u,31:[1,173],33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:174,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14, 
-23:15,24:16,25:17,26:18,27:19,28:ea,31:$a,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,73:Ua,74:53,75:54,76:179,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,116:176,117:V,118:X,119:G,120:Gb,123:177,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Ba,[2,170]),a(Ba,[2,171],{35:181,36:Oa}),a([1,6,31,32,42,46,65,70,73,82, -83,84,85,87,89,90,94,113,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],[2,164],{110:183,114:sb}),{31:[2,69]},{31:[2,70]},a(La,[2,87]),a(La,[2,90]),{7:185,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K, -105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:186,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X, 
-119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:187,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43, -133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:189,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,30:188,31:Da,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44, -137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{33:194,34:g,60:195,74:196,75:197,80:190,92:m,118:wa,119:G,143:191,144:[1,192],145:193},{142:198,146:[1,199],147:[1,200],148:[1,201]},a([6,31,70,94],Hb,{39:80,93:202,56:203,57:204,59:205,11:206,37:207,33:208,35:209,60:210,34:g,36:Oa,38:h,40:r,41:n,62:z,118:wa}),a(Ib,[2,34]),a(Ib,[2,35]),a(Ba,[2,38]),{15:142,16:211,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:144,60:71, 
-74:53,75:54,77:212,79:28,80:29,81:30,92:m,111:31,112:L,117:V,118:X,119:G,130:W},a([1,6,29,31,32,40,41,42,55,58,65,70,73,82,83,84,85,87,89,90,94,96,102,113,114,115,120,122,131,133,134,135,139,140,146,147,148,156,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175],[2,32]),a(Jb,[2,36]),{4:213,5:3,7:4,8:5,9:6,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:u,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F, -50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(sa,[2,5],{7:4,8:5,9:6,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,10:20,11:21,13:23,14:24,54:26,47:27,79:28,80:29,81:30,111:31,66:33,77:40,154:41,132:43,136:44,138:45,74:53,75:54,37:55,43:57, -33:70,60:71,141:77,39:80,5:214,12:b,28:u,34:g,38:h,40:r,41:n,44:B,45:H,48:I,49:F,50:Q,51:x,52:J,53:O,61:R,62:z,63:l,67:c,68:w,92:m,95:k,97:K,105:P,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,133:D,135:A,137:q,139:E,149:ba,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M}),a(N,[2,257]),{7:215,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71, 
-61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:216,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w, -74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:217,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29, -81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:218,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31, 
-112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:219,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa, -129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:220,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A, -136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:221,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77, 
-149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:222,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T, -159:v,160:Y,161:S,162:M},{7:223,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:224, -8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:225,8:122,10:20,11:21,12:b,13:23, 
-14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:226,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11, -20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:227,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16, -25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:228,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70, 
-34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(N,[2,208]),a(N,[2,213]),{7:229,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g, -37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(N,[2,207]),a(N,[2,212]),{39:230,40:r,41:n,110:231,114:sb},a(La,[2,88]),a(Kb,[2,167]),{35:232,36:Oa},{35:233,36:Oa},a(La,[2,103],{35:234,36:Oa}),{35:235,36:Oa},a(La, -[2,104]),{7:237,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,73:Lb,74:53,75:54,77:40,79:28,80:29,81:30,88:236,91:238,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,121:239,122:tb,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y, 
-161:S,162:M},{86:242,87:Ca,90:Pa},{110:243,114:sb},a(La,[2,89]),a(sa,[2,65],{15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,10:20,11:21,13:23,14:24,54:26,47:27,79:28,80:29,81:30,111:31,66:33,77:40,154:41,132:43,136:44,138:45,74:53,75:54,37:55,43:57,33:70,60:71,141:77,39:80,8:122,7:244,12:b,28:ea,34:g,38:h,40:r,41:n,44:B,45:H,48:I,49:F,50:Q,51:x,52:J,53:O,61:R,62:z,63:l,67:c,68:w,92:m,95:k,97:K,105:P,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,133:Za,135:Za,139:Za,156:Za, -137:q,149:ba,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M}),a(Ma,[2,28],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{7:245,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P, -111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{132:105,133:D,135:A,138:106,139:E,141:77,156:va},a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,163,164,165,166,167,168,169,170,171,172,173,174],Va,{15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,10:20,11:21,13:23,14:24,54:26,47:27,79:28,80:29,81:30,111:31,66:33,77:40,154:41,132:43,136:44, 
-138:45,74:53,75:54,37:55,43:57,33:70,60:71,141:77,39:80,7:120,8:122,12:b,28:ea,29:Ya,34:g,38:h,40:r,41:n,44:B,45:H,48:I,49:F,50:Q,51:x,52:J,53:O,61:R,62:z,63:l,67:c,68:w,92:m,95:k,97:K,105:P,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,137:q,149:ba,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M}),{6:[1,247],7:246,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:[1,248],33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I, -49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a([6,31],Ja,{69:251,65:[1,249],70:Mb}),a(Sa,[2,74]),a(Sa,[2,78],{55:[1,253],73:[1,252]}),a(Sa,[2,81]),a(fb,[2,82]),a(fb,[2,83]),a(fb,[2,84]),a(fb,[2,85]),{35:181,36:Oa},{7:254,8:122,10:20,11:21,12:b,13:23,14:24,15:7, -16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:$a,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,73:Ua,74:53,75:54,76:179,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,116:176,117:V,118:X,119:G,120:Gb,123:177,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(N,[2,68]),{4:256,5:3,7:4,8:5,9:6, 
-10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:u,32:[1,255],33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a([1,6,31,32,42,65,70,73,89,94, -115,120,122,131,133,134,135,139,140,156,159,160,164,165,166,167,168,169,170,171,172,173,174],[2,249],{141:77,132:102,138:103,163:fa}),a(ab,[2,250],{141:77,132:102,138:103,163:fa,165:ga}),a(ab,[2,251],{141:77,132:102,138:103,163:fa,165:ga}),a(ab,[2,252],{141:77,132:102,138:103,163:fa,165:ga}),a(N,[2,253],{40:ua,41:ua,82:ua,83:ua,84:ua,85:ua,87:ua,90:ua,113:ua,114:ua}),a(Kb,xa,{109:107,78:108,86:114,82:ta,83:Na,84:Fa,85:Ga,87:Ca,90:Pa,113:Ia}),{78:118,82:ta,83:Na,84:Fa,85:Ga,86:114,87:Ca,90:Pa,109:117, -113:Ia,114:xa},a(Nb,Ha),a(N,[2,254],{40:ua,41:ua,82:ua,83:ua,84:ua,85:ua,87:ua,90:ua,113:ua,114:ua}),a(N,[2,255]),a(N,[2,256]),{6:[1,259],7:257,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:[1,258],33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U, 
-130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:260,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44, -137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{30:261,31:Da,155:[1,262]},a(N,[2,192],{126:263,127:[1,264],128:[1,265]}),a(N,[2,206]),a(N,[2,214]),{31:[1,266],132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},{150:267,152:268,153:gb},a(N,[2,116]),{7:270,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea, -33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Eb,[2,119],{30:271,31:Da,40:ua,41:ua,82:ua,83:ua,84:ua,85:ua,87:ua,90:ua,113:ua,114:ua,96:[1,272]}),a(Ma,[2,199],{141:77,132:102,138:103,159:ma,160:Z, 
-163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Ea,bb,{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Ea,[2,123]),{29:[1,273],70:[1,274]},{29:[1,275]},{31:hb,33:280,34:g,94:[1,276],100:277,101:278,103:Wa},a([29,70],[2,139]),{102:[1,282]},{31:ub,33:287,34:g,94:[1,283],103:cb,106:284,108:285},a(Ea,[2,143]),{55:[1,289]},{7:290,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11, -20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{29:[1,291]},{6:qa,131:[1,292]},{4:293,5:3,7:4,8:5,9:6,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9, -18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:u,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a([6,31,70,120],Ob,{141:77,132:102,138:103,121:294,73:[1,295],122:tb,133:D,135:A,139:E, 
-156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(vb,[2,173]),a([6,31,120],Ja,{69:296,70:ib}),a(Qa,[2,182]),{7:254,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:$a,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,73:Ua,74:53,75:54,76:179,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31, -112:L,116:298,117:V,118:X,119:G,123:177,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Qa,[2,188]),a(Qa,[2,189]),a(Pb,[2,172]),a(Pb,[2,33]),a(t,[2,165]),{7:254,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:$a,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,73:Ua, -74:53,75:54,76:179,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,115:[1,299],116:300,117:V,118:X,119:G,123:177,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{30:301,31:Da,132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},a(Qb,[2,202],{141:77,132:102,138:103,133:D,134:[1,302],135:A,139:E,159:ma,160:Z,163:fa,164:ia, 
-165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Qb,[2,204],{141:77,132:102,138:103,133:D,134:[1,303],135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(N,[2,210]),a(Xa,[2,211],{141:77,132:102,138:103,133:D,135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,156,159,160,163,164,165,166,167,168, -169,170,171,172,173,174],[2,215],{140:[1,304]}),a(jb,[2,218]),{33:194,34:g,60:195,74:196,75:197,92:m,118:wa,119:e,143:305,145:193},a(jb,[2,224],{70:[1,306]}),a(kb,[2,220]),a(kb,[2,221]),a(kb,[2,222]),a(kb,[2,223]),a(N,[2,217]),{7:307,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40, -79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:308,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k, 
-97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:309,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V, -118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(lb,Ja,{69:310,70:Rb}),a(Aa,[2,111]),a(Aa,[2,51],{58:[1,312]}),a(Sb,[2,60],{55:[1,313]}),a(Aa,[2,56]),a(Sb,[2,61]),a(wb,[2,57]),a(wb,[2,58]),a(wb,[2,59]),{46:[1,314],78:118,82:ta,83:Na,84:Fa,85:Ga,86:114,87:Ca,90:Pa,109:117,113:Ia,114:xa},a(Nb,ua),{6:qa,42:[1,315]},a(sa,[2,4]),a(Tb,[2,258],{141:77,132:102,138:103,163:fa,164:ia,165:ga}),a(Tb,[2,259],{141:77, -132:102,138:103,163:fa,164:ia,165:ga}),a(ab,[2,260],{141:77,132:102,138:103,163:fa,165:ga}),a(ab,[2,261],{141:77,132:102,138:103,163:fa,165:ga}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,166,167,168,169,170,171,172,173,174],[2,262],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,167,168,169,170,171,172,173],[2,263],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,174:da}), 
-a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,168,169,170,171,172,173],[2,264],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,174:da}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,169,170,171,172,173],[2,265],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,174:da}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,170,171,172,173],[2,266],{141:77,132:102,138:103,159:ma,160:Z, -163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,174:da}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,171,172,173],[2,267],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,174:da}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,172,173],[2,268],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,174:da}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134, -135,139,140,156,173],[2,269],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,174:da}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,167,168,169,170,171,172,173,174],[2,270],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja}),a(Xa,[2,248],{141:77,132:102,138:103,133:D,135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Xa,[2,247],{141:77,132:102, 
-138:103,133:D,135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(t,[2,160]),a(t,[2,161]),a(La,[2,99]),a(La,[2,100]),a(La,[2,101]),a(La,[2,102]),{89:[1,316]},{73:Lb,89:[2,107],121:317,122:tb,132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},{89:[2,108]},{7:318,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15, -24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,89:[2,181],92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Ub,[2,175]),a(Ub,Vb),a(La,[2,106]),a(t,[2,162]),a(sa,[2,64],{141:77,132:102,138:103,133:bb,135:bb,139:bb,156:bb, -159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Ma,[2,29],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Ma,[2,48],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{7:319,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g, 
-37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:320,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57, -44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{66:321,67:c,68:w},a(Ra,db,{72:127,33:129,60:130,74:131,75:132,71:322,34:g,73:d,92:m,118:wa,119:e}),{6:Wb,31:Xb},a(Sa,[2,79]),{7:325,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11, -20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Qa,Ob,{141:77,132:102,138:103,73:[1,326],133:D,135:A,139:E,156:za,159:ma,160:Z,163:fa,164:ia,165:ga, 
-166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Yb,[2,30]),{6:qa,32:[1,327]},a(Ma,[2,271],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{7:328,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40, -79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:329,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k, -97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Ma,[2,274],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(N,[2,246]),{7:330,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27, 
-48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(N,[2,193],{127:[1,331]}),{30:332,31:Da},{30:335,31:Da,33:333,34:g,75:334,92:m},{150:336,152:268,153:gb},{32:[1,337],151:[1,338],152:339,153:gb},a(mb,[2,239]),{7:341,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8, -17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,124:340,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Zb,[2,117],{141:77,132:102,138:103,30:342,31:Da,133:D,135:A,139:E,159:ma, -160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(N,[2,120]),{7:343,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45, 
-139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{39:344,40:r,41:n},{92:[1,346],99:345,104:Fb},{39:347,40:r,41:n},{29:[1,348]},a(lb,Ja,{69:349,70:nb}),a(Aa,[2,130]),{31:hb,33:280,34:g,100:351,101:278,103:Wa},a(Aa,[2,135],{102:[1,352]}),a(Aa,[2,137],{102:[1,353]}),{33:354,34:g},a(Ea,[2,141]),a(lb,Ja,{69:355,70:xb}),a(Aa,[2,150]),{31:ub,33:287,34:g,103:cb,106:357,108:285},a(Aa,[2,155],{102:[1,358]}),a(Aa,[2,158],{102:[1,359]}),{6:[1,361],7:360,8:122,10:20,11:21,12:b,13:23,14:24, -15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:[1,362],33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(yb,[2,147],{141:77,132:102,138:103,133:D,135:A,139:E,159:ma, -160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{39:363,40:r,41:n},a(Ba,[2,200]),{6:qa,32:[1,364]},{7:365,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W, 
-132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a([12,28,34,38,40,41,44,45,48,49,50,51,52,53,61,62,63,67,68,92,95,97,105,112,117,118,119,125,129,130,133,135,137,139,149,155,157,158,159,160,161,162],Vb,{6:eb,31:eb,70:eb,120:eb}),{6:ob,31:pb,120:[1,366]},a([6,31,32,115,120],db,{15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,10:20,11:21,13:23,14:24,54:26,47:27,79:28,80:29,81:30,111:31,66:33,77:40,154:41,132:43, -136:44,138:45,74:53,75:54,37:55,43:57,33:70,60:71,141:77,39:80,8:122,76:179,7:254,123:369,12:b,28:ea,34:g,38:h,40:r,41:n,44:B,45:H,48:I,49:F,50:Q,51:x,52:J,53:O,61:R,62:z,63:l,67:c,68:w,73:Ua,92:m,95:k,97:K,105:P,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,133:D,135:A,137:q,139:E,149:ba,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M}),a(Ra,Ja,{69:370,70:ib}),a(t,[2,168]),a([6,31,115],Ja,{69:371,70:ib}),a($b,[2,243]),{7:372,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14, -23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:373,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19, 
-28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:374,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h, -39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(jb,[2,219]),{33:194,34:g,60:195,74:196,75:197,92:m,118:wa,119:e,145:375},a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,135,139,156],[2,226],{141:77,132:102,138:103,134:[1, -376],140:[1,377],159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(zb,[2,227],{141:77,132:102,138:103,134:[1,378],159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(zb,[2,233],{141:77,132:102,138:103,134:[1,379],159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{6:ac,31:bc,94:[1,380]},a(Ab,db,{39:80,57:204,59:205,11:206,37:207,33:208,35:209,60:210,56:383, 
-34:g,36:Oa,38:h,40:r,41:n,62:z,118:wa}),{7:384,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:[1,385],33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v, -160:Y,161:S,162:M},{7:386,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:[1,387],33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M}, -a(Ba,[2,39]),a(Jb,[2,37]),a(La,[2,105]),{7:388,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,89:[2,179],92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v, 
-160:Y,161:S,162:M},{89:[2,180],132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},a(Ma,[2,49],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{32:[1,389],132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},{30:390,31:Da},a(Sa,[2,75]),{33:129, -34:g,60:130,71:391,72:127,73:d,74:131,75:132,92:m,118:wa,119:e},a(cc,p,{71:126,72:127,33:129,60:130,74:131,75:132,64:392,34:g,73:d,92:m,118:wa,119:e}),a(Sa,[2,80],{141:77,132:102,138:103,133:D,135:A,139:E,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Qa,eb),a(Yb,[2,31]),{32:[1,393],132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},a(Ma,[2,273], -{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{30:394,31:Da,132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},{30:395,31:Da},a(N,[2,194]),{30:396,31:Da},{30:397,31:Da},a(Bb,[2,198]),{32:[1,398],151:[1,399],152:339,153:gb},a(N,[2,237]),{30:400,31:Da},a(mb,[2,240]),{30:401,31:Da,70:[1,402]},a(dc,[2,190],{141:77,132:102,138:103,133:D, 
-135:A,139:E,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(N,[2,118]),a(Zb,[2,121],{141:77,132:102,138:103,30:403,31:Da,133:D,135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Ea,[2,124]),{29:[1,404]},{31:hb,33:280,34:g,100:405,101:278,103:Wa},a(Ea,[2,125]),{39:406,40:r,41:n},{6:qb,31:rb,94:[1,407]},a(Ab,db,{33:280,101:410,34:g,103:Wa}),a(Ra,Ja,{69:411,70:nb}),{33:412,34:g}, -{33:413,34:g},{29:[2,140]},{6:Cb,31:Db,94:[1,414]},a(Ab,db,{33:287,108:417,34:g,103:cb}),a(Ra,Ja,{69:418,70:xb}),{33:419,34:g,103:[1,420]},{33:421,34:g},a(yb,[2,144],{141:77,132:102,138:103,133:D,135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{7:422,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q, -51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:423,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R, 
-62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Ea,[2,148]),{131:[1,424]},{120:[1,425],132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},a(vb,[2,174]),{7:254,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9, -18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,73:Ua,74:53,75:54,76:179,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,123:426,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:254,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11, -20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:$a,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,73:Ua,74:53,75:54,76:179,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,116:427,117:V,118:X,119:G,123:177,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Qa,[2,183]),{6:ob,31:pb,32:[1,428]},{6:ob,31:pb,115:[1,429]}, 
-a(Xa,[2,203],{141:77,132:102,138:103,133:D,135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Xa,[2,205],{141:77,132:102,138:103,133:D,135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Xa,[2,216],{141:77,132:102,138:103,133:D,135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(jb,[2,225]),{7:430,8:122,10:20,11:21, -12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:431,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9, -18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:432,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14, 
-23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:433,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19, -28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(vb,[2,109]),{11:206,33:208,34:g,35:209,36:Oa,37:207,38:h,39:80,40:r,41:n,56:434,57:204,59:205,60:210,62:z,118:wa},a(cc,Hb,{39:80,56:203,57:204, -59:205,11:206,37:207,33:208,35:209,60:210,93:435,34:g,36:Oa,38:h,40:r,41:n,62:z,118:wa}),a(Aa,[2,112]),a(Aa,[2,52],{141:77,132:102,138:103,133:D,135:A,139:E,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{7:436,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l, 
-66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Aa,[2,54],{141:77,132:102,138:103,133:D,135:A,139:E,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{7:437,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18, -27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{89:[2,178],132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na, -173:ra,174:da},a(N,[2,50]),a(N,[2,67]),a(Sa,[2,76]),a(Ra,Ja,{69:438,70:Mb}),a(N,[2,272]),a($b,[2,244]),a(N,[2,195]),a(Bb,[2,196]),a(Bb,[2,197]),a(N,[2,235]),{30:439,31:Da},{32:[1,440]},a(mb,[2,241],{6:[1,441]}),{7:442,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30, 
-92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(N,[2,122]),{39:443,40:r,41:n},a(lb,Ja,{69:444,70:nb}),a(Ea,[2,126]),{29:[1,445]},{33:280,34:g,101:446,103:Wa},{31:hb,33:280,34:g,100:447,101:278,103:Wa},a(Aa,[2,131]),{6:qb,31:rb,32:[1,448]},a(Aa,[2,136]),a(Aa,[2,138]),a(Ea,[2,142],{29:[1,449]}),{33:287,34:g,103:cb,108:450},{31:ub,33:287,34:g,103:cb,106:451,108:285}, -a(Aa,[2,151]),{6:Cb,31:Db,32:[1,452]},a(Aa,[2,156]),a(Aa,[2,157]),a(Aa,[2,159]),a(yb,[2,145],{141:77,132:102,138:103,133:D,135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{32:[1,453],132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},a(Ba,[2,201]),a(Ba,[2,177]),a(Qa,[2,184]),a(Ra,Ja,{69:454,70:ib}),a(Qa,[2,185]),a(t,[2,169]),a([1,6,31,32,42, -65,70,73,89,94,115,120,122,131,133,134,135,139,156],[2,228],{141:77,132:102,138:103,140:[1,455],159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(zb,[2,230],{141:77,132:102,138:103,134:[1,456],159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Ma,[2,229],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Ma,[2,234],{141:77,132:102, 
-138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Aa,[2,113]),a(Ra,Ja,{69:457,70:Rb}),{32:[1,458],132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},{32:[1,459],132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},{6:Wb,31:Xb,32:[1,460]},{32:[1,461]},a(N, -[2,238]),a(mb,[2,242]),a(dc,[2,191],{141:77,132:102,138:103,133:D,135:A,139:E,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Ea,[2,128]),{6:qb,31:rb,94:[1,462]},{39:463,40:r,41:n},a(Aa,[2,132]),a(Ra,Ja,{69:464,70:nb}),a(Aa,[2,133]),{39:465,40:r,41:n},a(Aa,[2,152]),a(Ra,Ja,{69:466,70:xb}),a(Aa,[2,153]),a(Ea,[2,146]),{6:ob,31:pb,32:[1,467]},{7:468,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16, -25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:469,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70, 
-34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{6:ac,31:bc,32:[1,470]},a(Aa,[2,53]),a(Aa,[2,55]),a(Sa,[2,77]),a(N,[2,236]),{29:[1,471]},a(Ea,[2,127]),{6:qb,31:rb,32:[1,472]},a(Ea,[2,149]),{6:Cb,31:Db,32:[1, -473]},a(Qa,[2,186]),a(Ma,[2,231],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Ma,[2,232],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Aa,[2,114]),{39:474,40:r,41:n},a(Aa,[2,134]),a(Aa,[2,154]),a(Ea,[2,129])],defaultActions:{68:[2,69],69:[2,70],238:[2,108],354:[2,140]},parseError:function(a,d){if(d.recoverable)this.trace(a);else{var e=function(a, -d){this.message=a;this.hash=d};e.prototype=Error;throw new e(a,d);}},parse:function(a){var d=[0],e=[null],b=[],p=this.table,t="",wa=0,c=0,g=0,Da=b.slice.call(arguments,1),k=Object.create(this.lexer),h={};for(f in this.yy)Object.prototype.hasOwnProperty.call(this.yy,f)&&(h[f]=this.yy[f]);k.setInput(a,h);h.lexer=k;h.parser=this;"undefined"==typeof k.yylloc&&(k.yylloc={});var f=k.yylloc;b.push(f);var l=k.options&&k.options.ranges;this.parseError="function"===typeof h.parseError?h.parseError:Object.getPrototypeOf(this).parseError; -for(var m,Ta,Ha,n,ua={},y,w;;){Ha=d[d.length-1];if(this.defaultActions[Ha])n=this.defaultActions[Ha];else{if(null===m||"undefined"==typeof m)m=k.lex()||1,"number"!==typeof m&&(m=this.symbols_[m]||m);n=p[Ha]&&p[Ha][m]}if("undefined"===typeof n||!n.length||!n[0]){w=[];for(y in p[Ha])this.terminals_[y]&&2=ta?this.wrapInBraces(d):d};b.prototype.compileRoot=function(a){var d,b;a.indent=a.bare?"":Ca;a.level=N;this.spaced=!0;a.scope=new 
xa(null,this,null,null!=(b=a.referencedVars)?b:[]);var e=a.locals||[];b=0;for(d=e.length;b=Fa?this.wrapInBraces(d): -d};return b}(w);f.StringLiteral=D=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}v(b,a);return b}(z);f.RegexLiteral=X=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}v(b,a);return b}(z);f.PassthroughLiteral=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}v(b,a);return b}(z);f.IdentifierLiteral=x=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}v(b,a);b.prototype.isAssignable=ha; -return b}(z);f.PropertyName=L=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}v(b,a);b.prototype.isAssignable=ha;return b}(z);f.StatementLiteral=W=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}v(b,a);b.prototype.isStatement=ha;b.prototype.makeReturn=na;b.prototype.jumps=function(a){if("break"===this.value&&!(null!=a&&a.loop||null!=a&&a.block)||"continue"===this.value&&(null==a||!a.loop))return this};b.prototype.compileNode=function(a){return[this.makeCode(""+ -this.tab+this.value+";")]};return b}(z);f.ThisLiteral=E=function(a){function b(){b.__super__.constructor.call(this,"this")}v(b,a);b.prototype.compileNode=function(a){var d;a=null!=(d=a.scope.method)&&d.bound?a.scope.method.context:this.value;return[this.makeCode(a)]};return b}(z);f.UndefinedLiteral=ca=function(a){function b(){b.__super__.constructor.call(this,"undefined")}v(b,a);b.prototype.compileNode=function(a){return[this.makeCode(a.level>=Ga?"(void 0)":"void 0")]};return b}(z);f.NullLiteral= -c=function(a){function b(){b.__super__.constructor.call(this,"null")}v(b,a);return b}(z);f.BooleanLiteral=b=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}v(b,a);return b}(z);f.Return=G=function(a){function 
b(a){this.expression=a}v(b,a);b.prototype.children=["expression"];b.prototype.isStatement=ha;b.prototype.makeReturn=na;b.prototype.jumps=na;b.prototype.compileToFragments=function(a,d){var p;var e=null!=(p=this.expression)?p.makeReturn():void 0;return!e||e instanceof -b?b.__super__.compileToFragments.call(this,a,d):e.compileToFragments(a,d)};b.prototype.compileNode=function(a){var b=[];b.push(this.makeCode(this.tab+("return"+(this.expression?" ":""))));this.expression&&(b=b.concat(this.expression.compileToFragments(a,Ka)));b.push(this.makeCode(";"));return b};return b}(sa);f.YieldReturn=T=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}v(b,a);b.prototype.compileNode=function(a){null==a.scope.parent&&this.error("yield can only occur inside functions"); -return b.__super__.compileNode.apply(this,arguments)};return b}(G);f.Value=C=function(a){function t(a,b,wa){if(!b&&a instanceof t)return a;this.base=a;this.properties=b||[];wa&&(this[wa]=!0);return this}v(t,a);t.prototype.children=["base","properties"];t.prototype.add=function(a){this.properties=this.properties.concat(a);return this};t.prototype.hasProperties=function(){return!!this.properties.length};t.prototype.bareLiteral=function(a){return!this.properties.length&&this.base instanceof a};t.prototype.isArray= -function(){return this.bareLiteral(q)};t.prototype.isRange=function(){return this.bareLiteral(V)};t.prototype.isComplex=function(){return this.hasProperties()||this.base.isComplex()};t.prototype.isAssignable=function(){return this.hasProperties()||this.base.isAssignable()};t.prototype.isNumber=function(){return this.bareLiteral(w)};t.prototype.isString=function(){return this.bareLiteral(D)};t.prototype.isRegex=function(){return this.bareLiteral(X)};t.prototype.isUndefined=function(){return this.bareLiteral(ca)}; -t.prototype.isNull=function(){return this.bareLiteral(c)};t.prototype.isBoolean=function(){return this.bareLiteral(b)};t.prototype.isAtomic=function(){var a;var 
b=this.properties.concat(this.base);var wa=0;for(a=b.length;wathis.properties.length&&!this.base.isComplex()&&(null==p||!p.isComplex()))return[this,this];b=new t(this.base,this.properties.slice(0,-1));if(b.isComplex()){var e=new x(a.scope.freeVariable("base"));b=new t(new P(new y(e, -b)))}if(!p)return[b,e];if(p.isComplex()){var c=new x(a.scope.freeVariable("name"));p=new R(new y(c,p.index));c=new R(c)}return[b.add(p),new t(e||b.base,[c||p])]};t.prototype.compileNode=function(a){var b;this.base.front=this.front;var p=this.properties;var e=this.base.compileToFragments(a,p.length?Ga:null);p.length&&Pa.test(da(e))&&e.push(this.makeCode("."));var t=0;for(b=p.length;t=Math.abs(this.fromNum-this.toNum)){var c=function(){e=[];for(var a=p=this.fromNum,b=this.toNum;p<=b?a<=b:a>=b;p<=b?a++:a--)e.push(a);return e}.apply(this);this.exclusive&&c.pop();return[this.makeCode("["+c.join(", ")+"]")]}var t=this.tab+Ca;var f=a.scope.freeVariable("i",{single:!0});var g=a.scope.freeVariable("results");var k="\n"+t+g+" \x3d [];";if(b)a.index=f,b=da(this.compileNode(a));else{var h= -f+" \x3d "+this.fromC+(this.toC!==this.toVar?", "+this.toC:"");b=this.fromVar+" \x3c\x3d "+this.toVar;b="var "+h+"; "+b+" ? "+f+" \x3c"+this.equals+" "+this.toVar+" : "+f+" \x3e"+this.equals+" "+this.toVar+"; "+b+" ? 
"+f+"++ : "+f+"--"}f="{ "+g+".push("+f+"); }\n"+t+"return "+g+";\n"+a.indent;a=function(a){return null!=a?a.contains(Va):void 0};if(a(this.from)||a(this.to))c=", arguments";return[this.makeCode("(function() {"+k+"\n"+t+"for ("+b+")"+f+"}).apply(this"+(null!=c?c:"")+")")]};return b}(sa); -f.Slice=aa=function(a){function b(a){this.range=a;b.__super__.constructor.call(this)}v(b,a);b.prototype.children=["range"];b.prototype.compileNode=function(a){var b=this.range;var p=b.to;var e=(b=b.from)&&b.compileToFragments(a,Ka)||[this.makeCode("0")];if(p){b=p.compileToFragments(a,Ka);var c=da(b);if(this.range.exclusive||-1!==+c)var t=", "+(this.range.exclusive?c:p.isNumber()?""+(+c+1):(b=p.compileToFragments(a,Ga),"+"+da(b)+" + 1 || 9e9"))}return[this.makeCode(".slice("+da(e)+(t||"")+")")]};return b}(sa); -f.Obj=m=function(a){function b(a,b){this.generated=null!=b?b:!1;this.objects=this.properties=a||[]}v(b,a);b.prototype.children=["properties"];b.prototype.compileNode=function(a){var b,p,e;var c=this.properties;if(this.generated){var t=0;for(b=c.length;t= -Fa?this.wrapInBraces(t):t}var h=g[0];1===e&&h instanceof H&&h.error("Destructuring assignment has no target");var m=this.variable.isObject();if(p&&1===e&&!(h instanceof U)){var l=null;if(h instanceof b&&"object"===h.context){t=h;var n=t.variable;var q=n.base;h=t.value;h instanceof b&&(l=h.value,h=h.variable)}else h instanceof b&&(l=h.value,h=h.variable),q=m?h["this"]?h.properties[0].name:new L(h.unwrap().value):new w(0);var r=q.unwrap()instanceof L;f=new C(f);f.properties.push(new (r?qa:R)(q));(c= -za(h.unwrap().value))&&h.error(c);l&&(f=new k("?",f,l));return(new b(h,f,null,{param:this.param})).compileToFragments(a,N)}var v=f.compileToFragments(a,ta);var y=da(v);t=[];n=!1;f.unwrap()instanceof x&&!this.variable.assigns(y)||(t.push([this.makeCode((l=a.scope.freeVariable("ref"))+" \x3d ")].concat(M.call(v))),v=[this.makeCode(l)],y=l);l=f=0;for(d=g.length;fN?this.wrapInBraces(e):e};return b}(sa);f.Code=h=function(b){function 
c(b,d,c){this.params=b||[];this.body=d||new a;this.bound="boundfunc"===c;this.isGenerator=!!this.body.contains(function(a){return a instanceof k&&a.isYield()|| -a instanceof T})}v(c,b);c.prototype.children=["params","body"];c.prototype.isStatement=function(){return!!this.ctor};c.prototype.jumps=ka;c.prototype.makeScope=function(a){return new xa(a,this.body,this)};c.prototype.compileNode=function(b){var d,f,e,g;this.bound&&null!=(d=b.scope.method)&&d.bound&&(this.context=b.scope.method.context);if(this.bound&&!this.context)return this.context="_this",d=new c([new K(new x(this.context))],new a([this])),d=new ya(d,[new E]),d.updateLocationDataIfMissing(this.locationData), -d.compileNode(b);b.scope=la(b,"classScope")||this.makeScope(b.scope);b.scope.shared=la(b,"sharedScope");b.indent+=Ca;delete b.bare;delete b.isExistentialEquals;d=[];var p=[];var h=this.params;var t=0;for(e=h.length;t=Ga?this.wrapInBraces(p):p};c.prototype.eachParamName=function(a){var b;var c=this.params;var e=[];var f=0;for(b=c.length;f=d.length)return[];if(1===d.length)return e=d[0],d=e.compileToFragments(a,ta),c?d:[].concat(e.makeCode(Ia("slice",a)+".call("),d,e.makeCode(")"));c=d.slice(f);var h=g=0;for(p=c.length;g< -p;h=++g){e=c[h];var k=e.compileToFragments(a,ta);c[h]=e instanceof b?[].concat(e.makeCode(Ia("slice",a)+".call("),k,e.makeCode(")")):[].concat(e.makeCode("["),k,e.makeCode("]"))}if(0===f)return e=d[0],a=e.joinFragmentArrays(c.slice(1),", "),c[0].concat(e.makeCode(".concat("),a,e.makeCode(")"));g=d.slice(0,f);p=[];k=0;for(h=g.length;k=Ga)return(new P(this)).compileToFragments(a);var f="+"===c||"-"===c;("new"===c||"typeof"===c||"delete"===c||f&&this.first instanceof b&&this.first.operator===c)&&d.push([this.makeCode(" ")]);if(f&&this.first instanceof b||"new"===c&&this.first.isStatement(a))this.first=new P(this.first);d.push(this.first.compileToFragments(a,Fa));this.flip&&d.reverse();return this.joinFragmentArrays(d,"")};b.prototype.compileYield=function(a){var b; -var d=[];var 
c=this.operator;null==a.scope.parent&&this.error("yield can only occur inside functions");0<=S.call(Object.keys(this.first),"expression")&&!(this.first instanceof ba)?null!=this.first.expression&&d.push(this.first.expression.compileToFragments(a,Fa)):(a.level>=Ka&&d.push([this.makeCode("(")]),d.push([this.makeCode(c)]),""!==(null!=(b=this.first.base)?b.value:void 0)&&d.push([this.makeCode(" ")]),d.push(this.first.compileToFragments(a,Fa)),a.level>=Ka&&d.push([this.makeCode(")")]));return this.joinFragmentArrays(d, -"")};b.prototype.compilePower=function(a){var b=new C(new x("Math"),[new qa(new L("pow"))]);return(new ya(b,[this.first,this.second])).compileToFragments(a)};b.prototype.compileFloorDivision=function(a){var d=new C(new x("Math"),[new qa(new L("floor"))]);var c=this.second.isComplex()?new P(this.second):this.second;c=new b("/",this.first,c);return(new ya(d,[c])).compileToFragments(a)};b.prototype.compileModulo=function(a){var b=new C(new z(Ia("modulo",a)));return(new ya(b,[this.first,this.second])).compileToFragments(a)}; -b.prototype.toString=function(a){return b.__super__.toString.call(this,a,this.constructor.name+" "+this.operator)};return b}(sa);f.In=O=function(a){function b(a,b){this.object=a;this.array=b}v(b,a);b.prototype.children=["object","array"];b.prototype.invert=ra;b.prototype.compileNode=function(a){var b;if(this.array instanceof C&&this.array.isArray()&&this.array.base.objects.length){var c=this.array.base.objects;var e=0;for(b=c.length;e=c.length)?c:this.wrapInBraces(c)};return b}(sa); -f.StringWithInterpolations=A=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}v(b,a);b.prototype.compileNode=function(a){var d;if(!a.inTaggedTemplateCall)return b.__super__.compileNode.apply(this,arguments);var c=this.body.unwrap();var e=[];c.traverseChildren(!1,function(a){if(a instanceof D)e.push(a);else if(a instanceof P)return e.push(a),!1;return!0});c=[];c.push(this.makeCode("`"));var 
f=0;for(d=e.length;fh,this.step&&null!=h&&e||(d=n.freeVariable("len")),K=""+t+f+" \x3d 0, "+d+" \x3d "+A+".length",w=""+t+f+" \x3d "+A+".length - 1",d=f+" \x3c "+d,n=f+" \x3e\x3d 0",this.step?(null!=h?e&&(d= -n,K=w):(d=r+" \x3e 0 ? "+d+" : "+n,K="("+r+" \x3e 0 ? ("+K+") : "+w+")"),f=f+" +\x3d "+r):f=""+(q!==f?"++"+f:f+"++"),K=[this.makeCode(K+"; "+d+"; "+t+f)])}if(this.returns){var B=""+this.tab+c+" \x3d [];\n";var V="\n"+this.tab+"return "+c+";";l.makeReturn(c)}this.guard&&(1=Na?this.wrapInBraces(e):e};c.prototype.unfoldSoak=function(){return this.soak&&this};return c}(sa);var gc={extend:function(a){return"function(child, parent) { for (var key in parent) { if ("+Ia("hasProp",a)+".call(parent, key)) child[key] \x3d parent[key]; } function ctor() { this.constructor \x3d child; } ctor.prototype \x3d parent.prototype; child.prototype \x3d new ctor(); child.__super__ \x3d parent.prototype; return child; }"},bind:function(){return"function(fn, me){ return function(){ return fn.apply(me, arguments); }; }"}, -indexOf:function(){return"[].indexOf || function(item) { for (var i \x3d 0, l \x3d this.length; i \x3c l; i++) { if (i in this \x26\x26 this[i] \x3d\x3d\x3d item) return i; } return -1; }"},modulo:function(){return"function(a, b) { return (+a % (b \x3d +b) + b) % b; }"},hasProp:function(){return"{}.hasOwnProperty"},slice:function(){return"[].slice"}};var N=1;var Ka=2;var ta=3;var Na=4;var Fa=5;var Ga=6;var Ca=" ";var Pa=/^[+-]?\d+$/;var Ia=function(a,b){var c=b.scope.root;if(a in c.utilities)return c.utilities[a]; -var d=c.freeVariable(a);c.assign(d,gc[a](b));return c.utilities[a]=d};var Ea=function(a,b){a=a.replace(/\n/g,"$\x26"+b);return a.replace(/\s+$/,"")};var Va=function(a){return a instanceof x&&"arguments"===a.value};var ea=function(a){return a instanceof E||a instanceof h&&a.bound||a instanceof va};var Ya=function(a){return a.isComplex()||("function"===typeof a.isAssignable?a.isAssignable():void 0)};var Ba=function(a,b,c){if(a=b[c].unfoldSoak(a))return 
b[c]=a.body,a.body=new C(b),a}}).call(this);return f}(); -u["./sourcemap"]=function(){var f={};(function(){var u=function(){function f(f){this.line=f;this.columns=[]}f.prototype.add=function(f,a,b){var q=a[0];a=a[1];null==b&&(b={});if(!this.columns[f]||!b.noReplace)return this.columns[f]={line:this.line,column:f,sourceLine:q,sourceColumn:a}};f.prototype.sourceLocation=function(f){for(var a;!((a=this.columns[f])||0>=f);)f--;return a&&[a.sourceLine,a.sourceColumn]};return f}();f=function(){function f(){this.lines=[]}f.prototype.add=function(f,a,b){var q;null== -b&&(b={});var g=a[0];a=a[1];return((q=this.lines)[g]||(q[g]=new u(g))).add(a,f,b)};f.prototype.sourceLocation=function(f){var a;var b=f[0];for(f=f[1];!((a=this.lines[b])||0>=b);)b--;return a&&a.sourceLocation(f)};f.prototype.generate=function(f,a){var b,q,g,h,r,n,u;null==f&&(f={});null==a&&(a=null);var y=g=q=u=0;var I=!1;var F="";var Q=this.lines;var x=b=0;for(h=Q.length;bf?1:0);a||!b;)f=a&31,(a>>=5)&&(f|=32),b+=this.encodeBase64(f);return b};f.prototype.encodeBase64=function(f){var a;if(!(a= -"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"[f]))throw Error("Cannot Base64 encode value: "+f);return a};return f}()}).call(this);return f}();u["./coffee-script"]=function(){var f={};(function(){var qa,q,y={}.hasOwnProperty;var a=u("fs");var b=u("vm");var ya=u("path");var g=u("./lexer").Lexer;var h=u("./parser").parser;var r=u("./helpers");var n=u("./sourcemap");var B=u("../../package.json");f.VERSION=B.version;f.FILE_EXTENSIONS=[".coffee",".litcoffee",".coffee.md"];f.helpers= -r;var H=function(a){switch(!1){case "function"!==typeof Buffer:return(new Buffer(a)).toString("base64");case "function"!==typeof btoa:return btoa(encodeURIComponent(a).replace(/%([0-9A-F]{2})/g,function(a,b){return String.fromCharCode("0x"+b)}));default:throw Error("Unable to base64 encode inline sourcemap.");}};B=function(a){return function(b,f){null==f&&(f={});try{return a.call(this,b,f)}catch(m){if("string"!==typeof 
b)throw m;throw r.updateSyntaxError(m,b,f.filename);}}};var I={};var F={};f.compile= -qa=B(function(a,b){var c,f,g,l;var q=r.extend;b=q({},b);var u=b.sourceMap||b.inlineMap||null==b.filename;q=b.filename||"\x3canonymous\x3e";I[q]=a;u&&(g=new n);var x=O.tokenize(a,b);var y=b;var G=[];var z=0;for(c=x.length;z0;)1&t&&(n+=e),t>>>=1,e+=e;return n},e.compact=function(e){var t,n,i,r;for(r=[],t=0,i=e.length;i>t;t++)n=e[t],n&&r.push(n);return r},e.count=function(e,t){var n,i;if(n=i=0,!t.length)return 1/0;for(;i=1+e.indexOf(t,i);)n++;return n},e.merge=function(e,t){return n(n({},e),t)},n=e.extend=function(e,t){var n,i;for(n in t)i=t[n],e[n]=i;return e},e.flatten=i=function(e){var t,n,r,s;for(n=[],r=0,s=e.length;s>r;r++)t=e[r],"[object Array]"===Object.prototype.toString.call(t)?n=n.concat(i(t)):n.push(t);return n},e.del=function(e,t){var n;return n=e[t],delete e[t],n},e.some=null!=(r=Array.prototype.some)?r:function(e){var t,n,i;for(n=0,i=this.length;i>n;n++)if(t=this[n],e(t))return!0;return!1},e.invertLiterate=function(e){var t,n,i;return i=!0,n=function(){var n,r,s,o;for(s=e.split("\n"),o=[],n=0,r=s.length;r>n;n++)t=s[n],i&&/^([ ]{4}|[ ]{0,3}\t)/.test(t)?o.push(t):(i=/^\s*$/.test(t))?o.push(t):o.push("# "+t);return o}(),n.join("\n")},t=function(e,t){return t?{first_line:e.first_line,first_column:e.first_column,last_line:t.last_line,last_column:t.last_column}:e},e.addLocationDataFn=function(e,n){return function(i){return"object"==typeof i&&i.updateLocationDataIfMissing&&i.updateLocationDataIfMissing(t(e,n)),i}},e.locationDataToString=function(e){var t;return"2"in e&&"first_line"in e[2]?t=e[2]:"first_line"in e&&(t=e),t?t.first_line+1+":"+(t.first_column+1)+"-"+(t.last_line+1+":"+(t.last_column+1)):"No location data"},e.baseFileName=function(e,t,n){var i,r;return 
null==t&&(t=!1),null==n&&(n=!1),r=n?/\\|\//:/\//,i=e.split(r),e=i[i.length-1],t&&e.indexOf(".")>=0?(i=e.split("."),i.pop(),"coffee"===i[i.length-1]&&i.length>1&&i.pop(),i.join(".")):e},e.isCoffee=function(e){return/\.((lit)?coffee|coffee\.md)$/.test(e)},e.isLiterate=function(e){return/\.(litcoffee|coffee\.md)$/.test(e)},e.throwSyntaxError=function(e,t){var n;throw n=new SyntaxError(e),n.location=t,n.toString=o,n.stack=""+n,n},e.updateSyntaxError=function(e,t,n){return e.toString===o&&(e.code||(e.code=t),e.filename||(e.filename=n),e.stack=""+e),e},o=function(){var e,t,n,i,r,o,a,c,l,h,u,p,d,f,m;return this.code&&this.location?(u=this.location,a=u.first_line,o=u.first_column,l=u.last_line,c=u.last_column,null==l&&(l=a),null==c&&(c=o),r=this.filename||"[stdin]",e=this.code.split("\n")[a],m=o,i=a===l?c+1:e.length,h=e.slice(0,m).replace(/[^\s]/g," ")+s("^",i-m),"undefined"!=typeof process&&null!==process&&(n=(null!=(p=process.stdout)?p.isTTY:void 0)&&!(null!=(d=process.env)?d.NODE_DISABLE_COLORS:void 0)),(null!=(f=this.colorful)?f:n)&&(t=function(e){return""+e+""},e=e.slice(0,m)+t(e.slice(m,i))+e.slice(i),h=t(h)),r+":"+(a+1)+":"+(o+1)+": error: "+this.message+"\n"+e+"\n"+h):Error.prototype.toString.call(this)},e.nameWhitespaceCharacter=function(e){switch(e){case" ":return"space";case"\n":return"newline";case"\r":return"carriage return";case" ":return"tab";default:return e}}}.call(this),t.exports}(),require["./rewriter"]=function(){var e={},t={exports:e};return function(){var t,n,i,r,s,o,a,c,l,h,u,p,d,f,m,g,v,b,y,k=[].indexOf||function(e){for(var t=0,n=this.length;n>t;t++)if(t in this&&this[t]===e)return t;return-1},w=[].slice;for(f=function(e,t,n){var i;return i=[e,t],i.generated=!0,n&&(i.origin=n),i},e.Rewriter=function(){function e(){}return e.prototype.rewrite=function(e){return 
this.tokens=e,this.removeLeadingNewlines(),this.closeOpenCalls(),this.closeOpenIndexes(),this.normalizeLines(),this.tagPostfixConditionals(),this.addImplicitBracesAndParens(),this.addLocationDataToGeneratedTokens(),this.tokens},e.prototype.scanTokens=function(e){var t,n,i;for(i=this.tokens,t=0;n=i[t];)t+=e.call(this,n,t,i);return!0},e.prototype.detectEnd=function(e,t,n){var i,o,a,c,l;for(l=this.tokens,i=0;c=l[e];){if(0===i&&t.call(this,c,e))return n.call(this,c,e);if(!c||0>i)return n.call(this,c,e-1);o=c[0],k.call(s,o)>=0?i+=1:(a=c[0],k.call(r,a)>=0&&(i-=1)),e+=1}return e-1},e.prototype.removeLeadingNewlines=function(){var e,t,n,i,r;for(i=this.tokens,e=t=0,n=i.length;n>t&&(r=i[e][0],"TERMINATOR"===r);e=++t);return e?this.tokens.splice(0,e):void 0},e.prototype.closeOpenCalls=function(){var e,t;return t=function(e,t){var n;return")"===(n=e[0])||"CALL_END"===n||"OUTDENT"===e[0]&&")"===this.tag(t-1)},e=function(e,t){return this.tokens["OUTDENT"===e[0]?t-1:t][0]="CALL_END"},this.scanTokens(function(n,i){return"CALL_START"===n[0]&&this.detectEnd(i+1,t,e),1})},e.prototype.closeOpenIndexes=function(){var e,t;return t=function(e){var t;return"]"===(t=e[0])||"INDEX_END"===t},e=function(e){return e[0]="INDEX_END"},this.scanTokens(function(n,i){return"INDEX_START"===n[0]&&this.detectEnd(i+1,t,e),1})},e.prototype.indexOfTag=function(){var e,t,n,i,r,s,o;for(t=arguments[0],r=arguments.length>=2?w.call(arguments,1):[],e=0,n=i=0,s=r.length;s>=0?s>i:i>s;n=s>=0?++i:--i){for(;"HERECOMMENT"===this.tag(t+n+e);)e+=2;if(null!=r[n]&&("string"==typeof r[n]&&(r[n]=[r[n]]),o=this.tag(t+n+e),0>k.call(r[n],o)))return-1}return t+n+e-1},e.prototype.looksObjectish=function(e){var t,n;return this.indexOfTag(e,"@",null,":")>-1||this.indexOfTag(e,null,":")>-1?!0:(n=this.indexOfTag(e,s),n>-1&&(t=null,this.detectEnd(n+1,function(e){var t;return t=e[0],k.call(r,t)>=0},function(e,n){return t=n}),":"===this.tag(t+1))?!0:!1)},e.prototype.findTagsBackwards=function(e,t){var 
n,i,o,a,c,l,h;for(n=[];e>=0&&(n.length||(a=this.tag(e),0>k.call(t,a)&&(c=this.tag(e),0>k.call(s,c)||this.tokens[e].generated)&&(l=this.tag(e),0>k.call(u,l))));)i=this.tag(e),k.call(r,i)>=0&&n.push(this.tag(e)),o=this.tag(e),k.call(s,o)>=0&&n.length&&n.pop(),e-=1;return h=this.tag(e),k.call(t,h)>=0},e.prototype.addImplicitBracesAndParens=function(){var e,t;return e=[],t=null,this.scanTokens(function(i,h,p){var d,m,g,v,b,y,w,T,C,F,E,N,L,x,S,D,R,A,I,_,O,$,j,M,B,V,P,U;if(U=i[0],E=(N=h>0?p[h-1]:[])[0],C=(p.length-1>h?p[h+1]:[])[0],j=function(){return e[e.length-1]},M=h,g=function(e){return h-M+e},v=function(){var e,t;return null!=(e=j())?null!=(t=e[2])?t.ours:void 0:void 0},b=function(){var e;return v()&&"("===(null!=(e=j())?e[0]:void 0)},w=function(){var e;return v()&&"{"===(null!=(e=j())?e[0]:void 0)},y=function(){var e;return v&&"CONTROL"===(null!=(e=j())?e[0]:void 0)},B=function(t){var n;return n=null!=t?t:h,e.push(["(",n,{ours:!0}]),p.splice(n,0,f("CALL_START","(")),null==t?h+=1:void 0},d=function(){return e.pop(),p.splice(h,0,f("CALL_END",")",["","end of input",i[2]])),h+=1},V=function(t,n){var r,s;return null==n&&(n=!0),r=null!=t?t:h,e.push(["{",r,{sameLine:!0,startsLine:n,ours:!0}]),s=new String("{"),s.generated=!0,p.splice(r,0,f("{",s,i)),null==t?h+=1:void 0},m=function(t){return t=null!=t?t:h,e.pop(),p.splice(t,0,f("}","}",i)),h+=1},b()&&("IF"===U||"TRY"===U||"FINALLY"===U||"CATCH"===U||"CLASS"===U||"SWITCH"===U))return e.push(["CONTROL",h,{ours:!0}]),g(1);if("INDENT"===U&&v()){if("=>"!==E&&"->"!==E&&"["!==E&&"("!==E&&","!==E&&"{"!==E&&"TRY"!==E&&"ELSE"!==E&&"="!==E)for(;b();)d();return y()&&e.pop(),e.push([U,h]),g(1)}if(k.call(s,U)>=0)return e.push([U,h]),g(1);if(k.call(r,U)>=0){for(;v();)b()?d():w()?m():e.pop();t=e.pop()}if((k.call(c,U)>=0&&i.spaced||"?"===U&&h>0&&!p[h-1].spaced)&&(k.call(o,C)>=0||k.call(l,C)>=0&&!(null!=(L=p[h+1])?L.spaced:void 0)&&!(null!=(x=p[h+1])?x.newLine:void 
0)))return"?"===U&&(U=i[0]="FUNC_EXIST"),B(h+1),g(2);if(k.call(c,U)>=0&&this.indexOfTag(h+1,"INDENT")>-1&&this.looksObjectish(h+2)&&!this.findTagsBackwards(h,["CLASS","EXTENDS","IF","CATCH","SWITCH","LEADING_WHEN","FOR","WHILE","UNTIL"]))return B(h+1),e.push(["INDENT",h+2]),g(3);if(":"===U){for(I=function(){var e;switch(!1){case e=this.tag(h-1),0>k.call(r,e):return t[1];case"@"!==this.tag(h-2):return h-2;default:return h-1}}.call(this);"HERECOMMENT"===this.tag(I-2);)I-=2;return this.insideForDeclaration="FOR"===C,P=0===I||(S=this.tag(I-1),k.call(u,S)>=0)||p[I-1].newLine,j()&&(D=j(),$=D[0],O=D[1],("{"===$||"INDENT"===$&&"{"===this.tag(O-1))&&(P||","===this.tag(I-1)||"{"===this.tag(I-1)))?g(1):(V(I,!!P),g(2))}if(w()&&k.call(u,U)>=0&&(j()[2].sameLine=!1),T="OUTDENT"===E||N.newLine,k.call(a,U)>=0||k.call(n,U)>=0&&T)for(;v();)if(R=j(),$=R[0],O=R[1],A=R[2],_=A.sameLine,P=A.startsLine,b()&&","!==E)d();else if(w()&&!this.insideForDeclaration&&_&&"TERMINATOR"!==U&&":"!==E)m();else{if(!w()||"TERMINATOR"!==U||","===E||P&&this.looksObjectish(h+1))break;if("HERECOMMENT"===C)return g(1);m()}if(!(","!==U||this.looksObjectish(h+1)||!w()||this.insideForDeclaration||"TERMINATOR"===C&&this.looksObjectish(h+2)))for(F="OUTDENT"===C?1:0;w();)m(h+F);return g(1)})},e.prototype.addLocationDataToGeneratedTokens=function(){return this.scanTokens(function(e,t,n){var i,r,s,o,a,c;return e[2]?1:e.generated||e.explicit?("{"===e[0]&&(s=null!=(a=n[t+1])?a[2]:void 0)?(r=s.first_line,i=s.first_column):(o=null!=(c=n[t-1])?c[2]:void 0)?(r=o.last_line,i=o.last_column):r=i=0,e[2]={first_line:r,first_column:i,last_line:r,last_column:i},1):1})},e.prototype.normalizeLines=function(){var e,t,r,s,o;return o=r=s=null,t=function(e,t){var r,s,a,c;return";"!==e[1]&&(r=e[0],k.call(p,r)>=0)&&!("TERMINATOR"===e[0]&&(s=this.tag(t+1),k.call(i,s)>=0))&&!("ELSE"===e[0]&&"THEN"!==o)&&!!("CATCH"!==(a=e[0])&&"FINALLY"!==a||"->"!==o&&"=>"!==o)||(c=e[0],k.call(n,c)>=0&&this.tokens[t-1].newLine)},e=function(e,t){return 
this.tokens.splice(","===this.tag(t-1)?t-1:t,0,s)},this.scanTokens(function(n,a,c){var l,h,u,p,f,m;if(m=n[0],"TERMINATOR"===m){if("ELSE"===this.tag(a+1)&&"OUTDENT"!==this.tag(a-1))return c.splice.apply(c,[a,1].concat(w.call(this.indentation()))),1;if(u=this.tag(a+1),k.call(i,u)>=0)return c.splice(a,1),0}if("CATCH"===m)for(l=h=1;2>=h;l=++h)if("OUTDENT"===(p=this.tag(a+l))||"TERMINATOR"===p||"FINALLY"===p)return c.splice.apply(c,[a+l,0].concat(w.call(this.indentation()))),2+l;return k.call(d,m)>=0&&"INDENT"!==this.tag(a+1)&&("ELSE"!==m||"IF"!==this.tag(a+1))?(o=m,f=this.indentation(c[a]),r=f[0],s=f[1],"THEN"===o&&(r.fromThen=!0),c.splice(a+1,0,r),this.detectEnd(a+2,t,e),"THEN"===m&&c.splice(a,1),1):1})},e.prototype.tagPostfixConditionals=function(){var e,t,n;return n=null,t=function(e,t){var n,i;return i=e[0],n=this.tokens[t-1][0],"TERMINATOR"===i||"INDENT"===i&&0>k.call(d,n)},e=function(e){return"INDENT"!==e[0]||e.generated&&!e.fromThen?n[0]="POST_"+n[0]:void 0},this.scanTokens(function(i,r){return"IF"!==i[0]?1:(n=i,this.detectEnd(r+1,t,e),1)})},e.prototype.indentation=function(e){var t,n;return t=["INDENT",2],n=["OUTDENT",2],e?(t.generated=n.generated=!0,t.origin=n.origin=e):t.explicit=n.explicit=!0,[t,n]},e.prototype.generate=f,e.prototype.tag=function(e){var t;return null!=(t=this.tokens[e])?t[0]:void 
0},e}(),t=[["(",")"],["[","]"],["{","}"],["INDENT","OUTDENT"],["CALL_START","CALL_END"],["PARAM_START","PARAM_END"],["INDEX_START","INDEX_END"],["STRING_START","STRING_END"],["REGEX_START","REGEX_END"]],e.INVERSES=h={},s=[],r=[],m=0,v=t.length;v>m;m++)b=t[m],g=b[0],y=b[1],s.push(h[y]=g),r.push(h[g]=y);i=["CATCH","THEN","ELSE","FINALLY"].concat(r),c=["IDENTIFIER","SUPER",")","CALL_END","]","INDEX_END","@","THIS"],o=["IDENTIFIER","NUMBER","STRING","STRING_START","JS","REGEX","REGEX_START","NEW","PARAM_START","CLASS","IF","TRY","SWITCH","THIS","BOOL","NULL","UNDEFINED","UNARY","YIELD","UNARY_MATH","SUPER","THROW","@","->","=>","[","(","{","--","++"],l=["+","-"],a=["POST_IF","FOR","WHILE","UNTIL","WHEN","BY","LOOP","TERMINATOR"],d=["ELSE","->","=>","TRY","FINALLY","THEN"],p=["TERMINATOR","CATCH","FINALLY","ELSE","OUTDENT","LEADING_WHEN"],u=["TERMINATOR","INDENT","OUTDENT"],n=[".","?.","::","?::"]}.call(this),t.exports}(),require["./lexer"]=function(){var e={},t={exports:e};return function(){var t,n,i,r,s,o,a,c,l,h,u,p,d,f,m,g,v,b,y,k,w,T,C,F,E,N,L,x,S,D,R,A,I,_,O,$,j,M,B,V,P,U,G,H,q,X,W,Y,K,z,J,Q,Z,et,tt,nt,it,rt,st,ot,at,ct,lt,ht,ut=[].indexOf||function(e){for(var t=0,n=this.length;n>t;t++)if(t in this&&this[t]===e)return t;return-1};ot=require("./rewriter"),P=ot.Rewriter,w=ot.INVERSES,at=require("./helpers"),nt=at.count,lt=at.starts,tt=at.compact,ct=at.repeat,it=at.invertLiterate,st=at.locationDataToString,ht=at.throwSyntaxError,e.Lexer=S=function(){function e(){}return e.prototype.tokenize=function(e,t){var 
n,i,r,s;for(null==t&&(t={}),this.literate=t.literate,this.indent=0,this.baseIndent=0,this.indebt=0,this.outdebt=0,this.indents=[],this.ends=[],this.tokens=[],this.seenFor=!1,this.chunkLine=t.line||0,this.chunkColumn=t.column||0,e=this.clean(e),r=0;this.chunk=e.slice(r);)if(n=this.identifierToken()||this.commentToken()||this.whitespaceToken()||this.lineToken()||this.stringToken()||this.numberToken()||this.regexToken()||this.jsToken()||this.literalToken(),s=this.getLineAndColumnFromChunk(n),this.chunkLine=s[0],this.chunkColumn=s[1],r+=n,t.untilBalanced&&0===this.ends.length)return{tokens:this.tokens,index:r};return this.closeIndentation(),(i=this.ends.pop())&&this.error("missing "+i.tag,i.origin[2]),t.rewrite===!1?this.tokens:(new P).rewrite(this.tokens)},e.prototype.clean=function(e){return e.charCodeAt(0)===t&&(e=e.slice(1)),e=e.replace(/\r/g,"").replace(z,""),et.test(e)&&(e="\n"+e,this.chunkLine--),this.literate&&(e=it(e)),e},e.prototype.identifierToken=function(){var e,t,n,i,r,c,l,h,u,p,d,f,m,g,b,y;return(h=v.exec(this.chunk))?(l=h[0],r=h[1],t=h[2],c=r.length,u=void 0,"own"===r&&"FOR"===this.tag()?(this.token("OWN",r),r.length):"from"===r&&"YIELD"===this.tag()?(this.token("FROM",r),r.length):(d=this.tokens,p=d[d.length-1],i=t||null!=p&&("."===(f=p[0])||"?."===f||"::"===f||"?::"===f||!p.spaced&&"@"===p[0]),b="IDENTIFIER",!i&&(ut.call(F,r)>=0||ut.call(a,r)>=0)&&(b=r.toUpperCase(),"WHEN"===b&&(m=this.tag(),ut.call(N,m)>=0)?b="LEADING_WHEN":"FOR"===b?this.seenFor=!0:"UNLESS"===b?b="IF":ut.call(J,b)>=0?b="UNARY":ut.call(B,b)>=0&&("INSTANCEOF"!==b&&this.seenFor?(b="FOR"+b,this.seenFor=!1):(b="RELATION","!"===this.value()&&(u=this.tokens.pop(),r="!"+r)))),ut.call(C,r)>=0&&(i?(b="IDENTIFIER",r=new String(r),r.reserved=!0):ut.call(V,r)>=0&&this.error("reserved word 
'"+r+"'",{length:r.length})),i||(ut.call(s,r)>=0&&(e=r,r=o[r]),b=function(){switch(r){case"!":return"UNARY";case"==":case"!=":return"COMPARE";case"&&":case"||":return"LOGIC";case"true":case"false":return"BOOL";case"break":case"continue":return"STATEMENT";default:return b}}()),y=this.token(b,r,0,c),e&&(y.origin=[b,e,y[2]]),y.variable=!i,u&&(g=[u[2].first_line,u[2].first_column],y[2].first_line=g[0],y[2].first_column=g[1]),t&&(n=l.lastIndexOf(":"),this.token(":",":",n,t.length)),l.length)):0},e.prototype.numberToken=function(){var e,t,n,i,r;return(n=I.exec(this.chunk))?(i=n[0],t=i.length,/^0[BOX]/.test(i)?this.error("radix prefix in '"+i+"' must be lowercase",{offset:1}):/E/.test(i)&&!/^0x/.test(i)?this.error("exponential notation in '"+i+"' must be indicated with a lowercase 'e'",{offset:i.indexOf("E")}):/^0\d*[89]/.test(i)?this.error("decimal literal '"+i+"' must not be prefixed with '0'",{length:t}):/^0\d+/.test(i)&&this.error("octal literal '"+i+"' must be prefixed with '0o'",{length:t}),(r=/^0o([0-7]+)/.exec(i))&&(i="0x"+parseInt(r[1],8).toString(16)),(e=/^0b([01]+)/.exec(i))&&(i="0x"+parseInt(e[1],2).toString(16)),this.token("NUMBER",i,0,t),t):0},e.prototype.stringToken=function(){var e,t,n,i,r,s,o,a,c,l,h,u,m,g,v,b;if(h=(Y.exec(this.chunk)||[])[0],!h)return 0;if(g=function(){switch(h){case"'":return W;case'"':return q;case"'''":return f;case'"""':return p}}(),s=3===h.length,u=this.matchWithInterpolations(g,h),b=u.tokens,r=u.index,e=b.length-1,n=h.charAt(0),s){for(a=null,i=function(){var e,t,n;for(n=[],o=e=0,t=b.length;t>e;o=++e)v=b[o],"NEOSTRING"===v[0]&&n.push(v[1]);return n}().join("#{}");l=d.exec(i);)t=l[1],(null===a||(m=t.length)>0&&a.length>m)&&(a=t);a&&(c=RegExp("^"+a,"gm")),this.mergeInterpolationTokens(b,{delimiter:n},function(t){return function(n,i){return n=t.formatString(n),0===i&&(n=n.replace(E,"")),i===e&&(n=n.replace(K,"")),c&&(n=n.replace(c,"")),n}}(this))}else this.mergeInterpolationTokens(b,{delimiter:n},function(t){return function(n,i){return 
n=t.formatString(n),n=n.replace(G,function(t,r){return 0===i&&0===r||i===e&&r+t.length===n.length?"":" "})}}(this));return r},e.prototype.commentToken=function(){var e,t,n;return(n=this.chunk.match(c))?(e=n[0],t=n[1],t&&((n=u.exec(e))&&this.error("block comments cannot contain "+n[0],{offset:n.index,length:n[0].length}),t.indexOf("\n")>=0&&(t=t.replace(RegExp("\\n"+ct(" ",this.indent),"g"),"\n")),this.token("HERECOMMENT",t,0,e.length)),e.length):0},e.prototype.jsToken=function(){var e,t;return"`"===this.chunk.charAt(0)&&(e=T.exec(this.chunk))?(this.token("JS",(t=e[0]).slice(1,-1),0,t.length),t.length):0},e.prototype.regexToken=function(){var e,t,n,r,s,o,a,c,l,h,u,p,d;switch(!1){case!(o=M.exec(this.chunk)):this.error("regular expressions cannot begin with "+o[2],{offset:o.index+o[1].length});break;case!(o=this.matchWithInterpolations(m,"///")):d=o.tokens,s=o.index;break;case!(o=$.exec(this.chunk)):if(p=o[0],e=o[1],t=o[2],this.validateEscapes(e,{isRegex:!0,offsetInChunk:1}),s=p.length,l=this.tokens,c=l[l.length-1],c)if(c.spaced&&(h=c[0],ut.call(i,h)>=0)){if(!t||O.test(p))return 0}else if(u=c[0],ut.call(A,u)>=0)return 0;t||this.error("missing / (unclosed regex)");break;default:return 0}switch(r=j.exec(this.chunk.slice(s))[0],n=s+r.length,a=this.makeToken("REGEX",null,0,n),!1){case!!Z.test(r):this.error("invalid regular expression flags "+r,{offset:s,length:r.length});break;case!(p||1===d.length):null==e&&(e=this.formatHeregex(d[0][1])),this.token("REGEX",""+this.makeDelimitedLiteral(e,{delimiter:"/"})+r,0,n,a);break;default:this.token("REGEX_START","(",0,0,a),this.token("IDENTIFIER","RegExp",0,0),this.token("CALL_START","(",0,0),this.mergeInterpolationTokens(d,{delimiter:'"',"double":!0},this.formatHeregex),r&&(this.token(",",",",s,0),this.token("STRING",'"'+r+'"',s,r.length)),this.token(")",")",n,0),this.token("REGEX_END",")",n,0)}return n},e.prototype.lineToken=function(){var e,t,n,i,r;if(!(n=R.exec(this.chunk)))return 
0;if(t=n[0],this.seenFor=!1,r=t.length-1-t.lastIndexOf("\n"),i=this.unfinished(),r-this.indebt===this.indent)return i?this.suppressNewlines():this.newlineToken(0),t.length;if(r>this.indent){if(i)return this.indebt=r-this.indent,this.suppressNewlines(),t.length;if(!this.tokens.length)return this.baseIndent=this.indent=r,t.length;e=r-this.indent+this.outdebt,this.token("INDENT",e,t.length-r,r),this.indents.push(e),this.ends.push({tag:"OUTDENT"}),this.outdebt=this.indebt=0,this.indent=r}else this.baseIndent>r?this.error("missing indentation",{offset:t.length}):(this.indebt=0,this.outdentToken(this.indent-r,i,t.length));return t.length},e.prototype.outdentToken=function(e,t,n){var i,r,s,o;for(i=this.indent-e;e>0;)s=this.indents[this.indents.length-1],s?s===this.outdebt?(e-=this.outdebt,this.outdebt=0):this.outdebt>s?(this.outdebt-=s,e-=s):(r=this.indents.pop()+this.outdebt,n&&(o=this.chunk[n],ut.call(b,o)>=0)&&(i-=r-e,e=r),this.outdebt=0,this.pair("OUTDENT"),this.token("OUTDENT",e,0,n),e-=r):e=0;for(r&&(this.outdebt-=e);";"===this.value();)this.tokens.pop();return"TERMINATOR"===this.tag()||t||this.token("TERMINATOR","\n",n,0),this.indent=i,this},e.prototype.whitespaceToken=function(){var e,t,n,i;return(e=et.exec(this.chunk))||(t="\n"===this.chunk.charAt(0))?(i=this.tokens,n=i[i.length-1],n&&(n[e?"spaced":"newLine"]=!0),e?e[0].length:0):0},e.prototype.newlineToken=function(e){for(;";"===this.value();)this.tokens.pop();return"TERMINATOR"!==this.tag()&&this.token("TERMINATOR","\n",e,0),this},e.prototype.suppressNewlines=function(){return"\\"===this.value()&&this.tokens.pop(),this},e.prototype.literalToken=function(){var e,t,n,s,o,a,c,u,p,d;if((e=_.exec(this.chunk))?(d=e[0],r.test(d)&&this.tagParameters()):d=this.chunk.charAt(0),u=d,n=this.tokens,t=n[n.length-1],"="===d&&t&&(!t[1].reserved&&(s=t[1],ut.call(C,s)>=0)&&(t.origin&&(t=t.origin),this.error("reserved word '"+t[1]+"' can't be assigned",t[2])),"||"===(o=t[1])||"&&"===o))return 
t[0]="COMPOUND_ASSIGN",t[1]+="=",d.length;if(";"===d)this.seenFor=!1,u="TERMINATOR";else if(ut.call(D,d)>=0)u="MATH";else if(ut.call(l,d)>=0)u="COMPARE";else if(ut.call(h,d)>=0)u="COMPOUND_ASSIGN";else if(ut.call(J,d)>=0)u="UNARY";else if(ut.call(Q,d)>=0)u="UNARY_MATH";else if(ut.call(U,d)>=0)u="SHIFT";else if(ut.call(x,d)>=0||"?"===d&&(null!=t?t.spaced:void 0))u="LOGIC";else if(t&&!t.spaced)if("("===d&&(a=t[0],ut.call(i,a)>=0))"?"===t[0]&&(t[0]="FUNC_EXIST"),u="CALL_START";else if("["===d&&(c=t[0],ut.call(y,c)>=0))switch(u="INDEX_START",t[0]){case"?":t[0]="INDEX_SOAK"}switch(p=this.makeToken(u,d),d){case"(":case"{":case"[":this.ends.push({tag:w[d],origin:p});break;case")":case"}":case"]":this.pair(d)}return this.tokens.push(p),d.length},e.prototype.tagParameters=function(){var e,t,n,i;if(")"!==this.tag())return this;for(t=[],i=this.tokens,e=i.length,i[--e][0]="PARAM_END";n=i[--e];)switch(n[0]){case")":t.push(n);break;case"(":case"CALL_START":if(!t.length)return"("===n[0]?(n[0]="PARAM_START",this):this;t.pop()}return this},e.prototype.closeIndentation=function(){return this.outdentToken(this.indent)},e.prototype.matchWithInterpolations=function(t,n){var i,r,s,o,a,c,l,h,u,p,d,f,m,g,v;if(v=[],h=n.length,this.chunk.slice(0,h)!==n)return null;for(m=this.chunk.slice(h);;){if(g=t.exec(m)[0],this.validateEscapes(g,{isRegex:"/"===n.charAt(0),offsetInChunk:h}),v.push(this.makeToken("NEOSTRING",g,h)),m=m.slice(g.length),h+=g.length,"#{"!==m.slice(0,2))break;p=this.getLineAndColumnFromChunk(h+1),c=p[0],r=p[1],d=(new e).tokenize(m.slice(1),{line:c,column:r,untilBalanced:!0}),l=d.tokens,o=d.index,o+=1,u=l[0],i=l[l.length-1],u[0]=u[1]="(",i[0]=i[1]=")",i.origin=["","end of interpolation",i[2]],"TERMINATOR"===(null!=(f=l[1])?f[0]:void 0)&&l.splice(1,1),v.push(["TOKENS",l]),m=m.slice(o),h+=o}return m.slice(0,n.length)!==n&&this.error("missing 
"+n,{length:n.length}),s=v[0],a=v[v.length-1],s[2].first_column-=n.length,a[2].last_column+=n.length,0===a[1].length&&(a[2].last_column-=1),{tokens:v,index:h+n.length}},e.prototype.mergeInterpolationTokens=function(e,t,n){var i,r,s,o,a,c,l,h,u,p,d,f,m,g,v,b;for(e.length>1&&(u=this.token("STRING_START","(",0,0)),s=this.tokens.length,o=a=0,l=e.length;l>a;o=++a){switch(g=e[o],m=g[0],b=g[1],m){case"TOKENS":if(2===b.length)continue;h=b[0],v=b;break;case"NEOSTRING":if(i=n(g[1],o),0===i.length){if(0!==o)continue;r=this.tokens.length}2===o&&null!=r&&this.tokens.splice(r,2),g[0]="STRING",g[1]=this.makeDelimitedLiteral(i,t),h=g,v=[g]}this.tokens.length>s&&(p=this.token("+","+"),p[2]={first_line:h[2].first_line,first_column:h[2].first_column,last_line:h[2].first_line,last_column:h[2].first_column}),(d=this.tokens).push.apply(d,v)}return u?(c=e[e.length-1],u.origin=["STRING",null,{first_line:u[2].first_line,first_column:u[2].first_column,last_line:c[2].last_line,last_column:c[2].last_column}],f=this.token("STRING_END",")"),f[2]={first_line:c[2].last_line,first_column:c[2].last_column,last_line:c[2].last_line,last_column:c[2].last_column}):void 0},e.prototype.pair=function(e){var t,n,i,r,s;return i=this.ends,n=i[i.length-1],e!==(s=null!=n?n.tag:void 0)?("OUTDENT"!==s&&this.error("unmatched "+e),r=this.indents,t=r[r.length-1],this.outdentToken(t,!0),this.pair(e)):this.ends.pop()},e.prototype.getLineAndColumnFromChunk=function(e){var t,n,i,r,s;return 0===e?[this.chunkLine,this.chunkColumn]:(s=e>=this.chunk.length?this.chunk:this.chunk.slice(0,+(e-1)+1||9e9),i=nt(s,"\n"),t=this.chunkColumn,i>0?(r=s.split("\n"),n=r[r.length-1],t=n.length):t+=s.length,[this.chunkLine+i,t])},e.prototype.makeToken=function(e,t,n,i){var r,s,o,a,c;return 
null==n&&(n=0),null==i&&(i=t.length),s={},o=this.getLineAndColumnFromChunk(n),s.first_line=o[0],s.first_column=o[1],r=Math.max(0,i-1),a=this.getLineAndColumnFromChunk(n+r),s.last_line=a[0],s.last_column=a[1],c=[e,t,s]},e.prototype.token=function(e,t,n,i,r){var s;return s=this.makeToken(e,t,n,i),r&&(s.origin=r),this.tokens.push(s),s},e.prototype.tag=function(){var e,t;return e=this.tokens,t=e[e.length-1],null!=t?t[0]:void 0},e.prototype.value=function(){var e,t;return e=this.tokens,t=e[e.length-1],null!=t?t[1]:void 0},e.prototype.unfinished=function(){var e;return L.test(this.chunk)||"\\"===(e=this.tag())||"."===e||"?."===e||"?::"===e||"UNARY"===e||"MATH"===e||"UNARY_MATH"===e||"+"===e||"-"===e||"YIELD"===e||"**"===e||"SHIFT"===e||"RELATION"===e||"COMPARE"===e||"LOGIC"===e||"THROW"===e||"EXTENDS"===e},e.prototype.formatString=function(e){return e.replace(X,"$1")},e.prototype.formatHeregex=function(e){return e.replace(g,"$1$2")},e.prototype.validateEscapes=function(e,t){var n,i,r,s,o,a,c,l;return null==t&&(t={}),s=k.exec(e),!s||(s[0],n=s[1],a=s[2],i=s[3],l=s[4],t.isRegex&&a&&"0"!==a.charAt(0))?void 0:(o=a?"octal escape sequences are not allowed":"invalid escape sequence",r="\\"+(a||i||l),this.error(o+" "+r,{offset:(null!=(c=t.offsetInChunk)?c:0)+s.index+n.length,length:r.length}))},e.prototype.makeDelimitedLiteral=function(e,t){var n;return null==t&&(t={}),""===e&&"/"===t.delimiter&&(e="(?:)"),n=RegExp("(\\\\\\\\)|(\\\\0(?=[1-7]))|\\\\?("+t.delimiter+")|\\\\?(?:(\\n)|(\\r)|(\\u2028)|(\\u2029))|(\\\\.)","g"),e=e.replace(n,function(e,n,i,r,s,o,a,c,l){switch(!1){case!n:return t.double?n+n:n;case!i:return"\\x00";case!r:return"\\"+r;case!s:return"\\n";case!o:return"\\r";case!a:return"\\u2028";case!c:return"\\u2029";case!l:return t.double?"\\"+l:l}}),""+t.delimiter+e+t.delimiter},e.prototype.error=function(e,t){var n,i,r,s,o,a;return null==t&&(t={}),r="first_line"in 
t?t:(o=this.getLineAndColumnFromChunk(null!=(s=t.offset)?s:0),i=o[0],n=o[1],o,{first_line:i,first_column:n,last_column:n+(null!=(a=t.length)?a:1)-1}),ht(e,r)},e}(),F=["true","false","null","this","new","delete","typeof","in","instanceof","return","throw","break","continue","debugger","yield","if","else","switch","for","while","do","try","catch","finally","class","extends","super"],a=["undefined","then","unless","until","loop","of","by","when"],o={and:"&&",or:"||",is:"==",isnt:"!=",not:"!",yes:"true",no:"false",on:"true",off:"false"},s=function(){var e;e=[];for(rt in o)e.push(rt);return e}(),a=a.concat(s),V=["case","default","function","var","void","with","const","let","enum","export","import","native","implements","interface","package","private","protected","public","static"],H=["arguments","eval","yield*"],C=F.concat(V).concat(H),e.RESERVED=V.concat(F).concat(a).concat(H),e.STRICT_PROSCRIBED=H,t=65279,v=/^(?!\d)((?:(?!\s)[$\w\x7f-\uffff])+)([^\n\S]*:(?!:))?/,I=/^0b[01]+|^0o[0-7]+|^0x[\da-f]+|^\d*\.?\d+(?:e[+-]?\d+)?/i,_=/^(?:[-=]>|[-+*\/%<>&|^!?=]=|>>>=?|([-+:])\1|([&|<>*\/%])\2=?|\?(\.|::)|\.{2,3})/,et=/^[^\n\S]+/,c=/^###([^#][\s\S]*?)(?:###[^\n\S]*|###$)|^(?:\s*#(?!##[^#]).*)+/,r=/^[-=]>/,R=/^(?:\n[^\n\S]*)+/,T=/^`[^\\`]*(?:\\.[^\\`]*)*`/,Y=/^(?:'''|"""|'|")/,W=/^(?:[^\\']|\\[\s\S])*/,q=/^(?:[^\\"#]|\\[\s\S]|\#(?!\{))*/,f=/^(?:[^\\']|\\[\s\S]|'(?!''))*/,p=/^(?:[^\\"#]|\\[\s\S]|"(?!"")|\#(?!\{))*/,X=/((?:\\\\)+)|\\[^\S\n]*\n\s*/g,G=/\s*\n\s*/g,d=/\n+([^\n\S]*)(?=\S)/g,$=/^\/(?!\/)((?:[^[\/\n\\]|\\[^\n]|\[(?:\\[^\n]|[^\]\n\\])*\])*)(\/)?/,j=/^\w*/,Z=/^(?!.*(.).*\1)[imgy]*$/,m=/^(?:[^\\\/#]|\\[\s\S]|\/(?!\/\/)|\#(?!\{))*/,g=/((?:\\\\)+)|\\(\s)|\s+(?:#.*)?/g,M=/^(\/|\/{3}\s*)(\*)/,O=/^\/=?\s/,u=/\*\//,L=/^\s*(?:,|\??\.(?![.\d])|::)/,k=/((?:^|[^\\])(?:\\\\)*)\\(?:(0[0-7]|[1-7])|(x(?![\da-fA-F]{2}).{0,2})|(u(?![\da-fA-F]{4}).{0,4}))/,E=/^[^\n\S]*\n/,K=/\n[^\n\S]*$/,z=/\s+$/,h=["-=","+=","/=","*=","%=","||=","&&=","?=","<<=",">>=",">>>=","&=","^=","|=","**=","//=","%%="
],J=["NEW","TYPEOF","DELETE","DO"],Q=["!","~"],x=["&&","||","&","|","^"],U=["<<",">>",">>>"],l=["==","!=","<",">","<=",">="],D=["*","/","%","//","%%"],B=["IN","OF","INSTANCEOF"],n=["TRUE","FALSE"],i=["IDENTIFIER",")","]","?","@","THIS","SUPER"],y=i.concat(["NUMBER","STRING","STRING_END","REGEX","REGEX_END","BOOL","NULL","UNDEFINED","}","::"]),A=y.concat(["++","--"]),N=["INDENT","OUTDENT","TERMINATOR"],b=[")","}","]"]}.call(this),t.exports}(),require["./parser"]=function(){var e={},t={exports:e},n=function(){function e(){this.yy={}}var t=function(e,t,n,i){for(n=n||{},i=e.length;i--;n[e[i]]=t);return n},n=[1,20],i=[1,75],r=[1,71],s=[1,76],o=[1,77],a=[1,73],c=[1,74],l=[1,50],h=[1,52],u=[1,53],p=[1,54],d=[1,55],f=[1,45],m=[1,46],g=[1,27],v=[1,60],b=[1,61],y=[1,70],k=[1,43],w=[1,26],T=[1,58],C=[1,59],F=[1,57],E=[1,38],N=[1,44],L=[1,56],x=[1,65],S=[1,66],D=[1,67],R=[1,68],A=[1,42],I=[1,64],_=[1,29],O=[1,30],$=[1,31],j=[1,32],M=[1,33],B=[1,34],V=[1,35],P=[1,78],U=[1,6,26,34,109],G=[1,88],H=[1,81],q=[1,80],X=[1,79],W=[1,82],Y=[1,83],K=[1,84],z=[1,85],J=[1,86],Q=[1,87],Z=[1,91],et=[1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,112,113,117,118,133,136,137,142,143,144,145,146,147,148],tt=[1,97],nt=[1,98],it=[1,99],rt=[1,100],st=[1,102],ot=[1,103],at=[1,96],ct=[2,115],lt=[1,6,25,26,34,56,61,64,73,74,75,76,78,80,81,85,91,92,93,98,100,109,111,112,113,117,118,133,136,137,142,143,144,145,146,147,148],ht=[2,82],ut=[1,108],pt=[2,61],dt=[1,112],ft=[1,117],mt=[1,118],gt=[1,120],vt=[1,6,25,26,34,46,56,61,64,73,74,75,76,78,80,81,85,91,92,93,98,100,109,111,112,113,117,118,133,136,137,142,143,144,145,146,147,148],bt=[2,79],yt=[1,6,26,34,56,61,64,80,85,93,98,100,109,111,112,113,117,118,133,136,137,142,143,144,145,146,147,148],kt=[1,155],wt=[1,157],Tt=[1,152],Ct=[1,6,25,26,34,46,56,61,64,73,74,75,76,78,80,81,85,87,91,92,93,98,100,109,111,112,113,117,118,133,136,137,140,141,142,143,144,145,146,147,148,149],Ft=[2,98],Et=[1,6,25,26,34,49,56,61,64,73,74,75,76,78,80,81,85,91,92,93,98,100,109,111,
112,113,117,118,133,136,137,142,143,144,145,146,147,148],Nt=[1,6,25,26,34,46,49,56,61,64,73,74,75,76,78,80,81,85,87,91,92,93,98,100,109,111,112,113,117,118,124,125,133,136,137,140,141,142,143,144,145,146,147,148,149],Lt=[1,207],xt=[1,206],St=[1,6,25,26,34,38,56,61,64,73,74,75,76,78,80,81,85,91,92,93,98,100,109,111,112,113,117,118,133,136,137,142,143,144,145,146,147,148],Dt=[2,59],Rt=[1,217],At=[6,25,26,56,61],It=[6,25,26,46,56,61,64],_t=[1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,112,113,117,118,133,136,137,143,145,146,147,148],Ot=[1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,112,113,117,118,133],$t=[73,74,75,76,78,81,91,92],jt=[1,236],Mt=[2,136],Bt=[1,6,25,26,34,46,56,61,64,73,74,75,76,78,80,81,85,91,92,93,98,100,109,111,112,113,117,118,124,125,133,136,137,142,143,144,145,146,147,148],Vt=[1,245],Pt=[6,25,26,61,93,98],Ut=[1,6,25,26,34,56,61,64,80,85,93,98,100,109,118,133],Gt=[1,6,25,26,34,56,61,64,80,85,93,98,100,109,112,118,133],Ht=[124,125],qt=[61,124,125],Xt=[1,256],Wt=[6,25,26,61,85],Yt=[6,25,26,49,61,85],Kt=[6,25,26,46,49,61,85],zt=[1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,112,113,117,118,133,136,137,145,146,147,148],Jt=[11,28,30,32,33,36,37,40,41,42,43,44,52,53,54,58,59,80,83,86,90,95,96,97,103,107,108,111,113,115,117,126,132,134,135,136,137,138,140,141],Qt=[2,125],Zt=[6,25,26],en=[2,60],tn=[1,270],nn=[1,271],rn=[1,6,25,26,34,56,61,64,80,85,93,98,100,105,106,109,111,112,113,117,118,128,130,133,136,137,142,143,144,145,146,147,148],sn=[26,128,130],on=[1,6,26,34,56,61,64,80,85,93,98,100,109,112,118,133],an=[2,74],cn=[1,293],ln=[1,294],hn=[1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,112,113,117,118,128,133,136,137,142,143,144,145,146,147,148],un=[1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,113,117,118,133],pn=[1,305],dn=[1,306],fn=[6,25,26,61],mn=[1,6,25,26,34,56,61,64,80,85,93,98,100,105,109,111,112,113,117,118,133,136,137,142,143,144,145,146,147,148],gn=[25,61],vn={trace:function(){},yy:{},symbols_:{error:2,Root:3,Body:4,Line:5,TERMINATOR:6,Expressi
on:7,Statement:8,Return:9,Comment:10,STATEMENT:11,Value:12,Invocation:13,Code:14,Operation:15,Assign:16,If:17,Try:18,While:19,For:20,Switch:21,Class:22,Throw:23,Block:24,INDENT:25,OUTDENT:26,Identifier:27,IDENTIFIER:28,AlphaNumeric:29,NUMBER:30,String:31,STRING:32,STRING_START:33,STRING_END:34,Regex:35,REGEX:36,REGEX_START:37,REGEX_END:38,Literal:39,JS:40,DEBUGGER:41,UNDEFINED:42,NULL:43,BOOL:44,Assignable:45,"=":46,AssignObj:47,ObjAssignable:48,":":49,SimpleObjAssignable:50,ThisProperty:51,RETURN:52,HERECOMMENT:53,PARAM_START:54,ParamList:55,PARAM_END:56,FuncGlyph:57,"->":58,"=>":59,OptComma:60,",":61,Param:62,ParamVar:63,"...":64,Array:65,Object:66,Splat:67,SimpleAssignable:68,Accessor:69,Parenthetical:70,Range:71,This:72,".":73,"?.":74,"::":75,"?::":76,Index:77,INDEX_START:78,IndexValue:79,INDEX_END:80,INDEX_SOAK:81,Slice:82,"{":83,AssignList:84,"}":85,CLASS:86,EXTENDS:87,OptFuncExist:88,Arguments:89,SUPER:90,FUNC_EXIST:91,CALL_START:92,CALL_END:93,ArgList:94,THIS:95,"@":96,"[":97,"]":98,RangeDots:99,"..":100,Arg:101,SimpleArgs:102,TRY:103,Catch:104,FINALLY:105,CATCH:106,THROW:107,"(":108,")":109,WhileSource:110,WHILE:111,WHEN:112,UNTIL:113,Loop:114,LOOP:115,ForBody:116,FOR:117,BY:118,ForStart:119,ForSource:120,ForVariables:121,OWN:122,ForValue:123,FORIN:124,FOROF:125,SWITCH:126,Whens:127,ELSE:128,When:129,LEADING_WHEN:130,IfBlock:131,IF:132,POST_IF:133,UNARY:134,UNARY_MATH:135,"-":136,"+":137,YIELD:138,FROM:139,"--":140,"++":141,"?":142,MATH:143,"**":144,SHIFT:145,COMPARE:146,LOGIC:147,RELATION:148,COMPOUND_ASSIGN:149,$accept:0,$end:1},terminals_:{2:"error",6:"TERMINATOR",11:"STATEMENT",25:"INDENT",26:"OUTDENT",28:"IDENTIFIER",30:"NUMBER",32:"STRING",33:"STRING_START",34:"STRING_END",36:"REGEX",37:"REGEX_START",38:"REGEX_END",40:"JS",41:"DEBUGGER",42:"UNDEFINED",43:"NULL",44:"BOOL",46:"=",49:":",52:"RETURN",53:"HERECOMMENT",54:"PARAM_START",56:"PARAM_END",58:"->",59:"=>",61:",",64:"...",73:".",74:"?.",75:"::",76:"?::",78:"INDEX_START",80:"INDEX_END",81:"INDEX_SO
AK",83:"{",85:"}",86:"CLASS",87:"EXTENDS",90:"SUPER",91:"FUNC_EXIST",92:"CALL_START",93:"CALL_END",95:"THIS",96:"@",97:"[",98:"]",100:"..",103:"TRY",105:"FINALLY",106:"CATCH",107:"THROW",108:"(",109:")",111:"WHILE",112:"WHEN",113:"UNTIL",115:"LOOP",117:"FOR",118:"BY",122:"OWN",124:"FORIN",125:"FOROF",126:"SWITCH",128:"ELSE",130:"LEADING_WHEN",132:"IF",133:"POST_IF",134:"UNARY",135:"UNARY_MATH",136:"-",137:"+",138:"YIELD",139:"FROM",140:"--",141:"++",142:"?",143:"MATH",144:"**",145:"SHIFT",146:"COMPARE",147:"LOGIC",148:"RELATION",149:"COMPOUND_ASSIGN"},productions_:[0,[3,0],[3,1],[4,1],[4,3],[4,2],[5,1],[5,1],[8,1],[8,1],[8,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[24,2],[24,3],[27,1],[29,1],[29,1],[31,1],[31,3],[35,1],[35,3],[39,1],[39,1],[39,1],[39,1],[39,1],[39,1],[39,1],[16,3],[16,4],[16,5],[47,1],[47,3],[47,5],[47,3],[47,5],[47,1],[50,1],[50,1],[48,1],[48,1],[9,2],[9,1],[10,1],[14,5],[14,2],[57,1],[57,1],[60,0],[60,1],[55,0],[55,1],[55,3],[55,4],[55,6],[62,1],[62,2],[62,3],[62,1],[63,1],[63,1],[63,1],[63,1],[67,2],[68,1],[68,2],[68,2],[68,1],[45,1],[45,1],[45,1],[12,1],[12,1],[12,1],[12,1],[12,1],[69,2],[69,2],[69,2],[69,2],[69,1],[69,1],[77,3],[77,2],[79,1],[79,1],[66,4],[84,0],[84,1],[84,3],[84,4],[84,6],[22,1],[22,2],[22,3],[22,4],[22,2],[22,3],[22,4],[22,5],[13,3],[13,3],[13,1],[13,2],[88,0],[88,1],[89,2],[89,4],[72,1],[72,1],[51,2],[65,2],[65,4],[99,1],[99,1],[71,5],[82,3],[82,2],[82,2],[82,1],[94,1],[94,3],[94,4],[94,4],[94,6],[101,1],[101,1],[101,1],[102,1],[102,3],[18,2],[18,3],[18,4],[18,5],[104,3],[104,3],[104,2],[23,2],[70,3],[70,5],[110,2],[110,4],[110,2],[110,4],[19,2],[19,2],[19,2],[19,1],[114,2],[114,2],[20,2],[20,2],[20,2],[116,2],[116,4],[116,2],[119,2],[119,3],[123,1],[123,1],[123,1],[123,1],[121,1],[121,3],[120,2],[120,2],[120,4],[120,4],[120,4],[120,6],[120,6],[21,5],[21,7],[21,4],[21,6],[127,1],[127,2],[129,3],[129,4],[131,3],[131,5],[17,1],[17,3],[17,3],[17,3],[15,2],[15,2],[15,2],[15,2],[15,2],[15,2],[15,3
],[15,2],[15,2],[15,2],[15,2],[15,2],[15,3],[15,3],[15,3],[15,3],[15,3],[15,3],[15,3],[15,3],[15,3],[15,5],[15,4],[15,3]],performAction:function(e,t,n,i,r,s,o){var a=s.length-1; +switch(r){case 1:return this.$=i.addLocationDataFn(o[a],o[a])(new i.Block);case 2:return this.$=s[a];case 3:this.$=i.addLocationDataFn(o[a],o[a])(i.Block.wrap([s[a]]));break;case 4:this.$=i.addLocationDataFn(o[a-2],o[a])(s[a-2].push(s[a]));break;case 5:this.$=s[a-1];break;case 6:case 7:case 8:case 9:case 11:case 12:case 13:case 14:case 15:case 16:case 17:case 18:case 19:case 20:case 21:case 22:case 27:case 32:case 34:case 47:case 48:case 49:case 50:case 51:case 59:case 60:case 70:case 71:case 72:case 73:case 78:case 79:case 82:case 86:case 92:case 136:case 137:case 139:case 169:case 170:case 186:case 192:this.$=s[a];break;case 10:case 25:case 26:case 28:case 30:case 33:case 35:this.$=i.addLocationDataFn(o[a],o[a])(new i.Literal(s[a]));break;case 23:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Block);break;case 24:case 31:case 93:this.$=i.addLocationDataFn(o[a-2],o[a])(s[a-1]);break;case 29:case 149:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Parens(s[a-1]));break;case 36:this.$=i.addLocationDataFn(o[a],o[a])(new i.Undefined);break;case 37:this.$=i.addLocationDataFn(o[a],o[a])(new i.Null);break;case 38:this.$=i.addLocationDataFn(o[a],o[a])(new i.Bool(s[a]));break;case 39:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Assign(s[a-2],s[a]));break;case 40:this.$=i.addLocationDataFn(o[a-3],o[a])(new i.Assign(s[a-3],s[a]));break;case 41:this.$=i.addLocationDataFn(o[a-4],o[a])(new i.Assign(s[a-4],s[a-1]));break;case 42:case 75:case 80:case 81:case 83:case 84:case 85:case 171:case 172:this.$=i.addLocationDataFn(o[a],o[a])(new i.Value(s[a]));break;case 43:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Assign(i.addLocationDataFn(o[a-2])(new i.Value(s[a-2])),s[a],"object",{operatorToken:i.addLocationDataFn(o[a-1])(new i.Literal(s[a-1]))}));break;case 44:this.$=i.addLocationDataFn(o[a-4],o[a])(new 
i.Assign(i.addLocationDataFn(o[a-4])(new i.Value(s[a-4])),s[a-1],"object",{operatorToken:i.addLocationDataFn(o[a-3])(new i.Literal(s[a-3]))}));break;case 45:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Assign(i.addLocationDataFn(o[a-2])(new i.Value(s[a-2])),s[a],null,{operatorToken:i.addLocationDataFn(o[a-1])(new i.Literal(s[a-1]))}));break;case 46:this.$=i.addLocationDataFn(o[a-4],o[a])(new i.Assign(i.addLocationDataFn(o[a-4])(new i.Value(s[a-4])),s[a-1],null,{operatorToken:i.addLocationDataFn(o[a-3])(new i.Literal(s[a-3]))}));break;case 52:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Return(s[a]));break;case 53:this.$=i.addLocationDataFn(o[a],o[a])(new i.Return);break;case 54:this.$=i.addLocationDataFn(o[a],o[a])(new i.Comment(s[a]));break;case 55:this.$=i.addLocationDataFn(o[a-4],o[a])(new i.Code(s[a-3],s[a],s[a-1]));break;case 56:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Code([],s[a],s[a-1]));break;case 57:this.$=i.addLocationDataFn(o[a],o[a])("func");break;case 58:this.$=i.addLocationDataFn(o[a],o[a])("boundfunc");break;case 61:case 98:this.$=i.addLocationDataFn(o[a],o[a])([]);break;case 62:case 99:case 131:case 173:this.$=i.addLocationDataFn(o[a],o[a])([s[a]]);break;case 63:case 100:case 132:this.$=i.addLocationDataFn(o[a-2],o[a])(s[a-2].concat(s[a]));break;case 64:case 101:case 133:this.$=i.addLocationDataFn(o[a-3],o[a])(s[a-3].concat(s[a]));break;case 65:case 102:case 135:this.$=i.addLocationDataFn(o[a-5],o[a])(s[a-5].concat(s[a-2]));break;case 66:this.$=i.addLocationDataFn(o[a],o[a])(new i.Param(s[a]));break;case 67:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Param(s[a-1],null,!0));break;case 68:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Param(s[a-2],s[a]));break;case 69:case 138:this.$=i.addLocationDataFn(o[a],o[a])(new i.Expansion);break;case 74:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Splat(s[a-1]));break;case 76:this.$=i.addLocationDataFn(o[a-1],o[a])(s[a-1].add(s[a]));break;case 77:this.$=i.addLocationDataFn(o[a-1],o[a])(new 
i.Value(s[a-1],[].concat(s[a])));break;case 87:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Access(s[a]));break;case 88:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Access(s[a],"soak"));break;case 89:this.$=i.addLocationDataFn(o[a-1],o[a])([i.addLocationDataFn(o[a-1])(new i.Access(new i.Literal("prototype"))),i.addLocationDataFn(o[a])(new i.Access(s[a]))]);break;case 90:this.$=i.addLocationDataFn(o[a-1],o[a])([i.addLocationDataFn(o[a-1])(new i.Access(new i.Literal("prototype"),"soak")),i.addLocationDataFn(o[a])(new i.Access(s[a]))]);break;case 91:this.$=i.addLocationDataFn(o[a],o[a])(new i.Access(new i.Literal("prototype")));break;case 94:this.$=i.addLocationDataFn(o[a-1],o[a])(i.extend(s[a],{soak:!0}));break;case 95:this.$=i.addLocationDataFn(o[a],o[a])(new i.Index(s[a]));break;case 96:this.$=i.addLocationDataFn(o[a],o[a])(new i.Slice(s[a]));break;case 97:this.$=i.addLocationDataFn(o[a-3],o[a])(new i.Obj(s[a-2],s[a-3].generated));break;case 103:this.$=i.addLocationDataFn(o[a],o[a])(new i.Class);break;case 104:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Class(null,null,s[a]));break;case 105:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Class(null,s[a]));break;case 106:this.$=i.addLocationDataFn(o[a-3],o[a])(new i.Class(null,s[a-1],s[a]));break;case 107:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Class(s[a]));break;case 108:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Class(s[a-1],null,s[a]));break;case 109:this.$=i.addLocationDataFn(o[a-3],o[a])(new i.Class(s[a-2],s[a]));break;case 110:this.$=i.addLocationDataFn(o[a-4],o[a])(new i.Class(s[a-3],s[a-1],s[a]));break;case 111:case 112:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Call(s[a-2],s[a],s[a-1]));break;case 113:this.$=i.addLocationDataFn(o[a],o[a])(new i.Call("super",[new i.Splat(new i.Literal("arguments"))]));break;case 114:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Call("super",s[a]));break;case 115:this.$=i.addLocationDataFn(o[a],o[a])(!1);break;case 
116:this.$=i.addLocationDataFn(o[a],o[a])(!0);break;case 117:this.$=i.addLocationDataFn(o[a-1],o[a])([]);break;case 118:case 134:this.$=i.addLocationDataFn(o[a-3],o[a])(s[a-2]);break;case 119:case 120:this.$=i.addLocationDataFn(o[a],o[a])(new i.Value(new i.Literal("this")));break;case 121:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Value(i.addLocationDataFn(o[a-1])(new i.Literal("this")),[i.addLocationDataFn(o[a])(new i.Access(s[a]))],"this"));break;case 122:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Arr([]));break;case 123:this.$=i.addLocationDataFn(o[a-3],o[a])(new i.Arr(s[a-2]));break;case 124:this.$=i.addLocationDataFn(o[a],o[a])("inclusive");break;case 125:this.$=i.addLocationDataFn(o[a],o[a])("exclusive");break;case 126:this.$=i.addLocationDataFn(o[a-4],o[a])(new i.Range(s[a-3],s[a-1],s[a-2]));break;case 127:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Range(s[a-2],s[a],s[a-1]));break;case 128:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Range(s[a-1],null,s[a]));break;case 129:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Range(null,s[a],s[a-1]));break;case 130:this.$=i.addLocationDataFn(o[a],o[a])(new i.Range(null,null,s[a]));break;case 140:this.$=i.addLocationDataFn(o[a-2],o[a])([].concat(s[a-2],s[a]));break;case 141:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Try(s[a]));break;case 142:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Try(s[a-1],s[a][0],s[a][1]));break;case 143:this.$=i.addLocationDataFn(o[a-3],o[a])(new i.Try(s[a-2],null,null,s[a]));break;case 144:this.$=i.addLocationDataFn(o[a-4],o[a])(new i.Try(s[a-3],s[a-2][0],s[a-2][1],s[a]));break;case 145:this.$=i.addLocationDataFn(o[a-2],o[a])([s[a-1],s[a]]);break;case 146:this.$=i.addLocationDataFn(o[a-2],o[a])([i.addLocationDataFn(o[a-1])(new i.Value(s[a-1])),s[a]]);break;case 147:this.$=i.addLocationDataFn(o[a-1],o[a])([null,s[a]]);break;case 148:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Throw(s[a]));break;case 150:this.$=i.addLocationDataFn(o[a-4],o[a])(new i.Parens(s[a-2]));break;case 
151:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.While(s[a]));break;case 152:this.$=i.addLocationDataFn(o[a-3],o[a])(new i.While(s[a-2],{guard:s[a]}));break;case 153:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.While(s[a],{invert:!0}));break;case 154:this.$=i.addLocationDataFn(o[a-3],o[a])(new i.While(s[a-2],{invert:!0,guard:s[a]}));break;case 155:this.$=i.addLocationDataFn(o[a-1],o[a])(s[a-1].addBody(s[a]));break;case 156:case 157:this.$=i.addLocationDataFn(o[a-1],o[a])(s[a].addBody(i.addLocationDataFn(o[a-1])(i.Block.wrap([s[a-1]]))));break;case 158:this.$=i.addLocationDataFn(o[a],o[a])(s[a]);break;case 159:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.While(i.addLocationDataFn(o[a-1])(new i.Literal("true"))).addBody(s[a]));break;case 160:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.While(i.addLocationDataFn(o[a-1])(new i.Literal("true"))).addBody(i.addLocationDataFn(o[a])(i.Block.wrap([s[a]]))));break;case 161:case 162:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.For(s[a-1],s[a]));break;case 163:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.For(s[a],s[a-1]));break;case 164:this.$=i.addLocationDataFn(o[a-1],o[a])({source:i.addLocationDataFn(o[a])(new i.Value(s[a]))});break;case 165:this.$=i.addLocationDataFn(o[a-3],o[a])({source:i.addLocationDataFn(o[a-2])(new i.Value(s[a-2])),step:s[a]});break;case 166:this.$=i.addLocationDataFn(o[a-1],o[a])(function(){return s[a].own=s[a-1].own,s[a].name=s[a-1][0],s[a].index=s[a-1][1],s[a]}());break;case 167:this.$=i.addLocationDataFn(o[a-1],o[a])(s[a]);break;case 168:this.$=i.addLocationDataFn(o[a-2],o[a])(function(){return s[a].own=!0,s[a]}());break;case 174:this.$=i.addLocationDataFn(o[a-2],o[a])([s[a-2],s[a]]);break;case 175:this.$=i.addLocationDataFn(o[a-1],o[a])({source:s[a]});break;case 176:this.$=i.addLocationDataFn(o[a-1],o[a])({source:s[a],object:!0});break;case 177:this.$=i.addLocationDataFn(o[a-3],o[a])({source:s[a-2],guard:s[a]});break;case 
178:this.$=i.addLocationDataFn(o[a-3],o[a])({source:s[a-2],guard:s[a],object:!0});break;case 179:this.$=i.addLocationDataFn(o[a-3],o[a])({source:s[a-2],step:s[a]});break;case 180:this.$=i.addLocationDataFn(o[a-5],o[a])({source:s[a-4],guard:s[a-2],step:s[a]});break;case 181:this.$=i.addLocationDataFn(o[a-5],o[a])({source:s[a-4],step:s[a-2],guard:s[a]});break;case 182:this.$=i.addLocationDataFn(o[a-4],o[a])(new i.Switch(s[a-3],s[a-1]));break;case 183:this.$=i.addLocationDataFn(o[a-6],o[a])(new i.Switch(s[a-5],s[a-3],s[a-1]));break;case 184:this.$=i.addLocationDataFn(o[a-3],o[a])(new i.Switch(null,s[a-1]));break;case 185:this.$=i.addLocationDataFn(o[a-5],o[a])(new i.Switch(null,s[a-3],s[a-1]));break;case 187:this.$=i.addLocationDataFn(o[a-1],o[a])(s[a-1].concat(s[a]));break;case 188:this.$=i.addLocationDataFn(o[a-2],o[a])([[s[a-1],s[a]]]);break;case 189:this.$=i.addLocationDataFn(o[a-3],o[a])([[s[a-2],s[a-1]]]);break;case 190:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.If(s[a-1],s[a],{type:s[a-2]}));break;case 191:this.$=i.addLocationDataFn(o[a-4],o[a])(s[a-4].addElse(i.addLocationDataFn(o[a-2],o[a])(new i.If(s[a-1],s[a],{type:s[a-2]}))));break;case 193:this.$=i.addLocationDataFn(o[a-2],o[a])(s[a-2].addElse(s[a]));break;case 194:case 195:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.If(s[a],i.addLocationDataFn(o[a-2])(i.Block.wrap([s[a-2]])),{type:s[a-1],statement:!0}));break;case 196:case 197:case 200:case 201:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Op(s[a-1],s[a]));break;case 198:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Op("-",s[a]));break;case 199:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Op("+",s[a]));break;case 202:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Op(s[a-2].concat(s[a-1]),s[a]));break;case 203:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Op("--",s[a]));break;case 204:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Op("++",s[a]));break;case 205:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Op("--",s[a-1],null,!0));break;case 
206:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Op("++",s[a-1],null,!0));break;case 207:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Existence(s[a-1]));break;case 208:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Op("+",s[a-2],s[a]));break;case 209:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Op("-",s[a-2],s[a]));break;case 210:case 211:case 212:case 213:case 214:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Op(s[a-1],s[a-2],s[a]));break;case 215:this.$=i.addLocationDataFn(o[a-2],o[a])(function(){return"!"===s[a-1].charAt(0)?new i.Op(s[a-1].slice(1),s[a-2],s[a]).invert():new i.Op(s[a-1],s[a-2],s[a])}());break;case 216:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Assign(s[a-2],s[a],s[a-1]));break;case 217:this.$=i.addLocationDataFn(o[a-4],o[a])(new i.Assign(s[a-4],s[a-1],s[a-3]));break;case 218:this.$=i.addLocationDataFn(o[a-3],o[a])(new i.Assign(s[a-3],s[a],s[a-2]));break;case 219:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Extends(s[a-2],s[a]))}},table:[{1:[2,1],3:1,4:2,5:3,7:4,8:5,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{1:[3]},{1:[2,2],6:P},t(U,[2,3]),t(U,[2,6],{119:69,110:89,116:90,111:x,113:S,117:R,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(U,[2,7],{119:69,110:92,116:93,111:x,113:S,117:R,133:Z}),t(et,[2,11],{88:94,69:95,77:101,73:tt,74:nt,75:it,76:rt,78:st,81:ot,91:at,92:ct}),t(et,[2,12],{77:101,88:104,69:105,73:tt,74:nt,75:it,76:rt,78:st,81:ot,91:at,92:ct}),t(et,[2,13]),t(et,[2,14]),t(et,[2,15]),t(et,[2,16]),t(et,[2,17]),t(et,[2,18]),t(et,[2,19]),t(et,[2,20]),t(et,[2,21]),t(et,[2,22]),t(et,[2,8]),t(et,[2,9]),t(et,[2,10]),t(lt,ht,{46:[1,106]}),t(lt,[2,83]),t(lt,[2,84]),t
(lt,[2,85]),t(lt,[2,86]),t([1,6,25,26,34,38,56,61,64,73,74,75,76,78,80,81,85,91,93,98,100,109,111,112,113,117,118,133,136,137,142,143,144,145,146,147,148],[2,113],{89:107,92:ut}),t([6,25,56,61],pt,{55:109,62:110,63:111,27:113,51:114,65:115,66:116,28:i,64:dt,83:y,96:ft,97:mt}),{24:119,25:gt},{7:121,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:123,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:124,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:125,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:127,8:126,9:1
8,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,139:[1,128],140:B,141:V},{12:130,13:131,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:132,51:63,65:47,66:48,68:129,70:23,71:24,72:25,83:y,90:w,95:T,96:C,97:F,108:L},{12:130,13:131,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:132,51:63,65:47,66:48,68:133,70:23,71:24,72:25,83:y,90:w,95:T,96:C,97:F,108:L},t(vt,bt,{87:[1,137],140:[1,134],141:[1,135],149:[1,136]}),t(et,[2,192],{128:[1,138]}),{24:139,25:gt},{24:140,25:gt},t(et,[2,158]),{24:141,25:gt},{7:142,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,25:[1,143],27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(yt,[2,103],{39:22,70:23,71:24,72:25,65:47,66:48,29:49,35:51,27:62,51:63,31:72,12:130,13:131,45:132,24:144,68:146,25:gt,28:i,30:r,32:s,33:o,36:a,37:c,40:l,41:h,42:u,43:p,44:d,83:y,87:[1,145],90:w,95:T,96:C,97:F,108:L}),{7:147,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,14
1:V},t([1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,112,113,117,118,133,142,143,144,145,146,147,148],[2,53],{12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,9:18,10:19,45:21,39:22,70:23,71:24,72:25,57:28,68:36,131:37,110:39,114:40,116:41,65:47,66:48,29:49,35:51,27:62,51:63,119:69,31:72,8:122,7:148,11:n,28:i,30:r,32:s,33:o,36:a,37:c,40:l,41:h,42:u,43:p,44:d,52:f,53:m,54:g,58:v,59:b,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,115:D,126:A,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V}),t(et,[2,54]),t(vt,[2,80]),t(vt,[2,81]),t(lt,[2,32]),t(lt,[2,33]),t(lt,[2,34]),t(lt,[2,35]),t(lt,[2,36]),t(lt,[2,37]),t(lt,[2,38]),{4:149,5:3,7:4,8:5,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,25:[1,150],27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:151,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,25:kt,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,64:wt,65:47,66:48,67:156,68:36,70:23,71:24,72:25,83:y,86:k,90:w,94:153,95:T,96:C,97:F,98:Tt,101:154,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(lt,[2,119]),t(lt,[2,120],{27:158,28:i}),{25:[2,57]},{25:[2,58]},t(Ct,[2,75]),t(Ct,[2,78]),{7:159,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:6
9,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:160,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:161,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:163,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,24:162,25:gt,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{27:168,28:i,51:169,65:170,66:171,71:164,83:y,96:ft,97:F,121:165,122:[1,166],123:167},{120:172,124:[1,173],125:[1,174]},t([6,25,61,85],Ft,{31:72,84:175,47:176,48:177,50:178,10:179,29:180,27:181,51:182,28:i,30:r,32:s,33:o,53:m,96:ft}),t(Et,[2,26]),t(Et,[2,27]),t(lt,[2,30]),{12:130,13:183,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:132,51:63,65:47,66:48,68:184,70:23,71:24,72:25,83:y,90:w,95:T,96:C,97:F,108:L},t(Nt,[2,25]),t(Et,[2,28]),{4:185,5:3,7:4,8:5,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,
42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(U,[2,5],{7:4,8:5,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,9:18,10:19,45:21,39:22,70:23,71:24,72:25,57:28,68:36,131:37,110:39,114:40,116:41,65:47,66:48,29:49,35:51,27:62,51:63,119:69,31:72,5:186,11:n,28:i,30:r,32:s,33:o,36:a,37:c,40:l,41:h,42:u,43:p,44:d,52:f,53:m,54:g,58:v,59:b,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,111:x,113:S,115:D,117:R,126:A,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V}),t(et,[2,207]),{7:187,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:188,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:189,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:190,8:122,9:18,10:19,1
1:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:191,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:192,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:193,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:194,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:4
0,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:195,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(et,[2,157]),t(et,[2,162]),{7:196,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(et,[2,156]),t(et,[2,161]),{89:197,92:ut},t(Ct,[2,76]),{92:[2,116]},{27:198,28:i},{27:199,28:i},t(Ct,[2,91],{27:200,28:i}),{27:201,28:i},t(Ct,[2,92]),{7:203,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,64:Lt,65:47,66:48,68:36,70:23,71:24,72:25,79:202,82:204,83:y,86:k,90:w,95:T,96:C,97:F,99:205,100:xt,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{77:208,78:st,81:ot},{89:209,92:ut},t(Ct,[2,77]),{6:[1,211],7:210,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,25:[1,212],27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:
69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(St,[2,114]),{7:215,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,25:kt,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,64:wt,65:47,66:48,67:156,68:36,70:23,71:24,72:25,83:y,86:k,90:w,93:[1,213],94:214,95:T,96:C,97:F,101:154,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t([6,25],Dt,{60:218,56:[1,216],61:Rt}),t(At,[2,62]),t(At,[2,66],{46:[1,220],64:[1,219]}),t(At,[2,69]),t(It,[2,70]),t(It,[2,71]),t(It,[2,72]),t(It,[2,73]),{27:158,28:i},{7:215,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,25:kt,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,64:wt,65:47,66:48,67:156,68:36,70:23,71:24,72:25,83:y,86:k,90:w,94:153,95:T,96:C,97:F,98:Tt,101:154,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(et,[2,56]),{4:222,5:3,7:4,8:5,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,26:[1,221],27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t([1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,112,113,117,118,133,136,137,143,144,145,146,147,148],[2,196],{119:69,110:89,116:90,142:X}),{110:92,111:x,113:S,116:93,117:R,119:69,133:Z},t(_t,[2,197],{119:69,110:89,116:90,142:X,144:Y}),t(_t,[2,198],{119:69,110:89,116:90,142:X,144:Y}),t(_t,[2,199],{119:69,110:89,116:90,142:X,144:Y}),t(et,[2,200],{119:69,110:92,116:93}
),t(Ot,[2,201],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),{7:223,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(et,[2,203],{73:bt,74:bt,75:bt,76:bt,78:bt,81:bt,91:bt,92:bt}),{69:95,73:tt,74:nt,75:it,76:rt,77:101,78:st,81:ot,88:94,91:at,92:ct},{69:105,73:tt,74:nt,75:it,76:rt,77:101,78:st,81:ot,88:104,91:at,92:ct},t($t,ht),t(et,[2,204],{73:bt,74:bt,75:bt,76:bt,78:bt,81:bt,91:bt,92:bt}),t(et,[2,205]),t(et,[2,206]),{6:[1,226],7:224,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,25:[1,225],27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:227,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{24:228,25:gt,132:[1,229]},t(et,[2,141],{104:230,105:[1,231],106:[1,232]}),t(et,[2,155]),t(et,[2,163]),{25:[1,233],110:89,111:x,113:S,116:90,117:R,119:69,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q},{127:234,129:235,130:jt},t(et,[2,104]),{7:237,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:
11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(yt,[2,107],{24:238,25:gt,73:bt,74:bt,75:bt,76:bt,78:bt,81:bt,91:bt,92:bt,87:[1,239]}),t(Ot,[2,148],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(Ot,[2,52],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),{6:P,109:[1,240]},{4:241,5:3,7:4,8:5,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t([6,25,61,98],Mt,{119:69,110:89,116:90,99:242,64:[1,243],100:xt,111:x,113:S,117:R,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(Bt,[2,122]),t([6,25,98],Dt,{60:244,61:Vt}),t(Pt,[2,131]),{7:215,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,25:kt,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,64:wt,65:47,66:48,67:156,68:36,70:23,71:24,72:25,83:y,86:k,90:w,94:246,95:T,96:C,97:F,101:154,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(Pt,[2,137]),t(Pt,[2,138]),t(Nt,[2,121]),{24:247,25:gt,110:89,111:x,113:S,116:90,117:R,119:69,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q},t(Ut,[2,151],{119:69,110:89,116:90,111:x,112:[1,248],113:S,117:R,136:H,137:q,142:X,143:W,144:Y,145:K,1
46:z,147:J,148:Q}),t(Ut,[2,153],{119:69,110:89,116:90,111:x,112:[1,249],113:S,117:R,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(et,[2,159]),t(Gt,[2,160],{119:69,110:89,116:90,111:x,113:S,117:R,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t([1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,112,113,117,133,136,137,142,143,144,145,146,147,148],[2,164],{118:[1,250]}),t(Ht,[2,167]),{27:168,28:i,51:169,65:170,66:171,83:y,96:ft,97:mt,121:251,123:167},t(Ht,[2,173],{61:[1,252]}),t(qt,[2,169]),t(qt,[2,170]),t(qt,[2,171]),t(qt,[2,172]),t(et,[2,166]),{7:253,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:254,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t([6,25,85],Dt,{60:255,61:Xt}),t(Wt,[2,99]),t(Wt,[2,42],{49:[1,257]}),t(Yt,[2,50],{46:[1,258]}),t(Wt,[2,47]),t(Yt,[2,51]),t(Kt,[2,48]),t(Kt,[2,49]),{38:[1,259],69:105,73:tt,74:nt,75:it,76:rt,77:101,78:st,81:ot,88:104,91:at,92:ct},t($t,bt),{6:P,34:[1,260]},t(U,[2,4]),t(zt,[2,208],{119:69,110:89,116:90,142:X,143:W,144:Y}),t(zt,[2,209],{119:69,110:89,116:90,142:X,143:W,144:Y}),t(_t,[2,210],{119:69,110:89,116:90,142:X,144:Y}),t(_t,[2,211],{119:69,110:89,116:90,142:X,144:Y}),t([1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,112,113,117,118,133,145,146,147,148],[2,212],{119:69,110:89,116
:90,136:H,137:q,142:X,143:W,144:Y}),t([1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,112,113,117,118,133,146,147],[2,213],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K,148:Q}),t([1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,112,113,117,118,133,147],[2,214],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,148:Q}),t([1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,112,113,117,118,133,146,147,148],[2,215],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K}),t(Gt,[2,195],{119:69,110:89,116:90,111:x,113:S,117:R,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(Gt,[2,194],{119:69,110:89,116:90,111:x,113:S,117:R,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(St,[2,111]),t(Ct,[2,87]),t(Ct,[2,88]),t(Ct,[2,89]),t(Ct,[2,90]),{80:[1,261]},{64:Lt,80:[2,95],99:262,100:xt,110:89,111:x,113:S,116:90,117:R,119:69,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q},{80:[2,96]},{7:263,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,80:[2,130],83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(Jt,[2,124]),t(Jt,Qt),t(Ct,[2,94]),t(St,[2,112]),t(Ot,[2,39],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),{7:264,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:265,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:1
3,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(St,[2,117]),t([6,25,93],Dt,{60:266,61:Vt}),t(Pt,Mt,{119:69,110:89,116:90,64:[1,267],111:x,113:S,117:R,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),{57:268,58:v,59:b},t(Zt,en,{63:111,27:113,51:114,65:115,66:116,62:269,28:i,64:dt,83:y,96:ft,97:mt}),{6:tn,25:nn},t(At,[2,67]),{7:272,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(rn,[2,23]),{6:P,26:[1,273]},t(Ot,[2,202],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(Ot,[2,216],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),{7:274,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:275,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,1
07:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(Ot,[2,219],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(et,[2,193]),{7:276,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(et,[2,142],{105:[1,277]}),{24:278,25:gt},{24:281,25:gt,27:279,28:i,66:280,83:y},{127:282,129:235,130:jt},{26:[1,283],128:[1,284],129:285,130:jt},t(sn,[2,186]),{7:287,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,102:286,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(on,[2,105],{119:69,110:89,116:90,24:288,25:gt,111:x,113:S,117:R,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(et,[2,108]),{7:289,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(lt,[2,149]),{6:P,26:[1,290]},{7:291,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:
h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t([11,28,30,32,33,36,37,40,41,42,43,44,52,53,54,58,59,83,86,90,95,96,97,103,107,108,111,113,115,117,126,132,134,135,136,137,138,140,141],Qt,{6:an,25:an,61:an,98:an}),{6:cn,25:ln,98:[1,292]},t([6,25,26,93,98],en,{12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,9:18,10:19,45:21,39:22,70:23,71:24,72:25,57:28,68:36,131:37,110:39,114:40,116:41,65:47,66:48,29:49,35:51,27:62,51:63,119:69,31:72,8:122,67:156,7:215,101:295,11:n,28:i,30:r,32:s,33:o,36:a,37:c,40:l,41:h,42:u,43:p,44:d,52:f,53:m,54:g,58:v,59:b,64:wt,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,111:x,113:S,115:D,117:R,126:A,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V}),t(Zt,Dt,{60:296,61:Vt}),t(hn,[2,190]),{7:297,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:298,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:299,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:
m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(Ht,[2,168]),{27:168,28:i,51:169,65:170,66:171,83:y,96:ft,97:mt,123:300},t([1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,113,117,133],[2,175],{119:69,110:89,116:90,112:[1,301],118:[1,302],136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(un,[2,176],{119:69,110:89,116:90,112:[1,303],136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),{6:pn,25:dn,85:[1,304]},t([6,25,26,85],en,{31:72,48:177,50:178,10:179,29:180,27:181,51:182,47:307,28:i,30:r,32:s,33:o,53:m,96:ft}),{7:308,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,25:[1,309],27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:310,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,25:[1,311],27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(lt,[2,31]),t(Et,[2,29]),t(Ct,[2,93]),{7:312,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,80:[2,128],83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,1
34:_,135:O,136:$,137:j,138:M,140:B,141:V},{80:[2,129],110:89,111:x,113:S,116:90,117:R,119:69,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q},t(Ot,[2,40],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),{26:[1,313],110:89,111:x,113:S,116:90,117:R,119:69,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q},{6:cn,25:ln,93:[1,314]},t(Pt,an),{24:315,25:gt},t(At,[2,63]),{27:113,28:i,51:114,62:316,63:111,64:dt,65:115,66:116,83:y,96:ft,97:mt},t(fn,pt,{62:110,63:111,27:113,51:114,65:115,66:116,55:317,28:i,64:dt,83:y,96:ft,97:mt}),t(At,[2,68],{119:69,110:89,116:90,111:x,113:S,117:R,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(rn,[2,24]),{26:[1,318],110:89,111:x,113:S,116:90,117:R,119:69,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q},t(Ot,[2,218],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),{24:319,25:gt,110:89,111:x,113:S,116:90,117:R,119:69,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q},{24:320,25:gt},t(et,[2,143]),{24:321,25:gt},{24:322,25:gt},t(mn,[2,147]),{26:[1,323],128:[1,324],129:285,130:jt},t(et,[2,184]),{24:325,25:gt},t(sn,[2,187]),{24:326,25:gt,61:[1,327]},t(gn,[2,139],{119:69,110:89,116:90,111:x,113:S,117:R,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(et,[2,106]),t(on,[2,109],{119:69,110:89,116:90,24:328,25:gt,111:x,113:S,117:R,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),{109:[1,329]},{98:[1,330],110:89,111:x,113:S,116:90,117:R,119:69,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q},t(Bt,[2,123]),{7:215,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,64:wt,65:47,66:48,67:156,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,101:331,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_
,135:O,136:$,137:j,138:M,140:B,141:V},{7:215,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,25:kt,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,64:wt,65:47,66:48,67:156,68:36,70:23,71:24,72:25,83:y,86:k,90:w,94:332,95:T,96:C,97:F,101:154,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(Pt,[2,132]),{6:cn,25:ln,26:[1,333]},t(Gt,[2,152],{119:69,110:89,116:90,111:x,113:S,117:R,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(Gt,[2,154],{119:69,110:89,116:90,111:x,113:S,117:R,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(Gt,[2,165],{119:69,110:89,116:90,111:x,113:S,117:R,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(Ht,[2,174]),{7:334,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:335,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:336,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T
,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(Bt,[2,97]),{10:179,27:181,28:i,29:180,30:r,31:72,32:s,33:o,47:337,48:177,50:178,51:182,53:m,96:ft},t(fn,Ft,{31:72,47:176,48:177,50:178,10:179,29:180,27:181,51:182,84:338,28:i,30:r,32:s,33:o,53:m,96:ft}),t(Wt,[2,100]),t(Wt,[2,43],{119:69,110:89,116:90,111:x,113:S,117:R,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),{7:339,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(Wt,[2,45],{119:69,110:89,116:90,111:x,113:S,117:R,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),{7:340,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{80:[2,127],110:89,111:x,113:S,116:90,117:R,119:69,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q},t(et,[2,41]),t(St,[2,118]),t(et,[2,55]),t(At,[2,64]),t(Zt,Dt,{60:341,61:Rt}),t(et,[2,217]),t(hn,[2,191]),t(et,[2,144]),t(mn,[2,145]),t(mn,[2,146]),t(et,[2,182]),{24:342,25:gt},{26:[1,343]},t(sn,[2,188],{6:[1,344]}),{7:345,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,
68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(et,[2,110]),t(lt,[2,150]),t(lt,[2,126]),t(Pt,[2,133]),t(Zt,Dt,{60:346,61:Vt}),t(Pt,[2,134]),t([1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,112,113,117,133],[2,177],{119:69,110:89,116:90,118:[1,347],136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(un,[2,179],{119:69,110:89,116:90,112:[1,348],136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(Ot,[2,178],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(Wt,[2,101]),t(Zt,Dt,{60:349,61:Xt}),{26:[1,350],110:89,111:x,113:S,116:90,117:R,119:69,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q},{26:[1,351],110:89,111:x,113:S,116:90,117:R,119:69,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q},{6:tn,25:nn,26:[1,352]},{26:[1,353]},t(et,[2,185]),t(sn,[2,189]),t(gn,[2,140],{119:69,110:89,116:90,111:x,113:S,117:R,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),{6:cn,25:ln,26:[1,354]},{7:355,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:356,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{6:pn,25:dn,26:[1,357]},t(Wt,[2,44]),t(Wt,[2,46]),
t(At,[2,65]),t(et,[2,183]),t(Pt,[2,135]),t(Ot,[2,180],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(Ot,[2,181],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(Wt,[2,102])],defaultActions:{60:[2,57],61:[2,58],96:[2,116],204:[2,96]},parseError:function(e,t){if(!t.recoverable)throw Error(e); +this.trace(e)},parse:function(e){function t(){var e;return e=f.lex()||p,"number"!=typeof e&&(e=n.symbols_[e]||e),e}var n=this,i=[0],r=[null],s=[],o=this.table,a="",c=0,l=0,h=0,u=2,p=1,d=s.slice.call(arguments,1),f=Object.create(this.lexer),m={yy:{}};for(var g in this.yy)Object.prototype.hasOwnProperty.call(this.yy,g)&&(m.yy[g]=this.yy[g]);f.setInput(e,m.yy),m.yy.lexer=f,m.yy.parser=this,f.yylloc===void 0&&(f.yylloc={});var v=f.yylloc;s.push(v);var b=f.options&&f.options.ranges;this.parseError="function"==typeof m.yy.parseError?m.yy.parseError:Object.getPrototypeOf(this).parseError;for(var y,k,w,T,C,F,E,N,L,x={};;){if(w=i[i.length-1],this.defaultActions[w]?T=this.defaultActions[w]:((null===y||y===void 0)&&(y=t()),T=o[w]&&o[w][y]),T===void 0||!T.length||!T[0]){var S="";L=[];for(F in o[w])this.terminals_[F]&&F>u&&L.push("'"+this.terminals_[F]+"'");S=f.showPosition?"Parse error on line "+(c+1)+":\n"+f.showPosition()+"\nExpecting "+L.join(", ")+", got '"+(this.terminals_[y]||y)+"'":"Parse error on line "+(c+1)+": Unexpected "+(y==p?"end of input":"'"+(this.terminals_[y]||y)+"'"),this.parseError(S,{text:f.match,token:this.terminals_[y]||y,line:f.yylineno,loc:v,expected:L})}if(T[0]instanceof Array&&T.length>1)throw Error("Parse Error: multiple actions possible at state: "+w+", token: "+y);switch(T[0]){case 1:i.push(y),r.push(f.yytext),s.push(f.yylloc),i.push(T[1]),y=null,k?(y=k,k=null):(l=f.yyleng,a=f.yytext,c=f.yylineno,v=f.yylloc,h>0&&h--);break;case 
2:if(E=this.productions_[T[1]][1],x.$=r[r.length-E],x._$={first_line:s[s.length-(E||1)].first_line,last_line:s[s.length-1].last_line,first_column:s[s.length-(E||1)].first_column,last_column:s[s.length-1].last_column},b&&(x._$.range=[s[s.length-(E||1)].range[0],s[s.length-1].range[1]]),C=this.performAction.apply(x,[a,l,c,m.yy,T[1],r,s].concat(d)),C!==void 0)return C;E&&(i=i.slice(0,2*-1*E),r=r.slice(0,-1*E),s=s.slice(0,-1*E)),i.push(this.productions_[T[1]][0]),r.push(x.$),s.push(x._$),N=o[i[i.length-2]][i[i.length-1]],i.push(N);break;case 3:return!0}}return!0}};return e.prototype=vn,vn.Parser=e,new e}();return require!==void 0&&e!==void 0&&(e.parser=n,e.Parser=n.Parser,e.parse=function(){return n.parse.apply(n,arguments)},e.main=function(t){t[1]||(console.log("Usage: "+t[0]+" FILE"),process.exit(1));var n=require("fs").readFileSync(require("path").normalize(t[1]),"utf8");return e.parser.parse(n)},t!==void 0&&require.main===t&&e.main(process.argv.slice(1))),t.exports}(),require["./scope"]=function(){var e={},t={exports:e};return function(){var t,n=[].indexOf||function(e){for(var t=0,n=this.length;n>t;t++)if(t in this&&this[t]===e)return t;return-1};e.Scope=t=function(){function e(e,t,n,i){var r,s;this.parent=e,this.expressions=t,this.method=n,this.referencedVars=i,this.variables=[{name:"arguments",type:"arguments"}],this.positions={},this.parent||(this.utilities={}),this.root=null!=(r=null!=(s=this.parent)?s.root:void 0)?r:this}return e.prototype.add=function(e,t,n){return this.shared&&!n?this.parent.add(e,t,n):Object.prototype.hasOwnProperty.call(this.positions,e)?this.variables[this.positions[e]].type=t:this.positions[e]=this.variables.push({name:e,type:t})-1},e.prototype.namedMethod=function(){var e;return(null!=(e=this.method)?e.name:void 0)||!this.parent?this.method:this.parent.namedMethod()},e.prototype.find=function(e){return this.check(e)?!0:(this.add(e,"var"),!1)},e.prototype.parameter=function(e){return this.shared&&this.parent.check(e,!0)?void 
0:this.add(e,"param")},e.prototype.check=function(e){var t;return!!(this.type(e)||(null!=(t=this.parent)?t.check(e):void 0))},e.prototype.temporary=function(e,t,n){return null==n&&(n=!1),n?(t+parseInt(e,36)).toString(36).replace(/\d/g,"a"):e+(t||"")},e.prototype.type=function(e){var t,n,i,r;for(i=this.variables,t=0,n=i.length;n>t;t++)if(r=i[t],r.name===e)return r.type;return null},e.prototype.freeVariable=function(e,t){var i,r,s;for(null==t&&(t={}),i=0;;){if(s=this.temporary(e,i,t.single),!(this.check(s)||n.call(this.root.referencedVars,s)>=0))break;i++}return(null!=(r=t.reserve)?r:!0)&&this.add(s,"var",!0),s},e.prototype.assign=function(e,t){return this.add(e,{value:t,assigned:!0},!0),this.hasAssignments=!0},e.prototype.hasDeclarations=function(){return!!this.declaredVariables().length},e.prototype.declaredVariables=function(){var e;return function(){var t,n,i,r;for(i=this.variables,r=[],t=0,n=i.length;n>t;t++)e=i[t],"var"===e.type&&r.push(e.name);return r}.call(this).sort()},e.prototype.assignedVariables=function(){var e,t,n,i,r;for(n=this.variables,i=[],e=0,t=n.length;t>e;e++)r=n[e],r.type.assigned&&i.push(r.name+" = "+r.type.value);return i},e}()}.call(this),t.exports}(),require["./nodes"]=function(){var e={},t={exports:e};return function(){var t,n,i,r,s,o,a,c,l,h,u,p,d,f,m,g,v,b,y,k,w,T,C,F,E,N,L,x,S,D,R,A,I,_,O,$,j,M,B,V,P,U,G,H,q,X,W,Y,K,z,J,Q,Z,et,tt,nt,it,rt,st,ot,at,ct,lt,ht,ut,pt,dt,ft,mt,gt,vt,bt,yt,kt=function(e,t){function n(){this.constructor=e}for(var i in t)wt.call(t,i)&&(e[i]=t[i]);return n.prototype=t.prototype,e.prototype=new n,e.__super__=t.prototype,e},wt={}.hasOwnProperty,Tt=[].indexOf||function(e){for(var t=0,n=this.length;n>t;t++)if(t in this&&this[t]===e)return 
t;return-1},Ct=[].slice;Error.stackTraceLimit=1/0,P=require("./scope").Scope,dt=require("./lexer"),$=dt.RESERVED,V=dt.STRICT_PROSCRIBED,ft=require("./helpers"),et=ft.compact,rt=ft.flatten,it=ft.extend,ht=ft.merge,tt=ft.del,gt=ft.starts,nt=ft.ends,mt=ft.some,Z=ft.addLocationDataFn,lt=ft.locationDataToString,vt=ft.throwSyntaxError,e.extend=it,e.addLocationDataFn=Z,Q=function(){return!0},D=function(){return!1},X=function(){return this},S=function(){return this.negated=!this.negated,this},e.CodeFragment=l=function(){function e(e,t){var n;this.code=""+t,this.locationData=null!=e?e.locationData:void 0,this.type=(null!=e?null!=(n=e.constructor)?n.name:void 0:void 0)||"unknown"}return e.prototype.toString=function(){return""+this.code+(this.locationData?": "+lt(this.locationData):"")},e}(),st=function(e){var t;return function(){var n,i,r;for(r=[],n=0,i=e.length;i>n;n++)t=e[n],r.push(t.code);return r}().join("")},e.Base=r=function(){function e(){}return e.prototype.compile=function(e,t){return st(this.compileToFragments(e,t))},e.prototype.compileToFragments=function(e,t){var n;return e=it({},e),t&&(e.level=t),n=this.unfoldSoak(e)||this,n.tab=e.indent,e.level!==L&&n.isStatement(e)?n.compileClosure(e):n.compileNode(e)},e.prototype.compileClosure=function(e){var n,i,r,a,l,h,u;return(a=this.jumps())&&a.error("cannot use a pure statement in an expression"),e.sharedScope=!0,r=new c([],s.wrap([this])),n=[],((i=this.contains(at))||this.contains(ct))&&(n=[new x("this")],i?(l="apply",n.push(new x("arguments"))):l="call",r=new z(r,[new t(new x(l))])),h=new o(r,n).compileNode(e),(r.isGenerator||(null!=(u=r.base)?u.isGenerator:void 0))&&(h.unshift(this.makeCode("(yield* ")),h.push(this.makeCode(")"))),h},e.prototype.cache=function(e,t,n){var r,s,o;return r=null!=n?n(this):this.isComplex(),r?(s=new x(e.scope.freeVariable("ref")),o=new 
i(s,this),t?[o.compileToFragments(e,t),[this.makeCode(s.value)]]:[o,s]):(s=t?this.compileToFragments(e,t):this,[s,s])},e.prototype.cacheToCodeFragments=function(e){return[st(e[0]),st(e[1])]},e.prototype.makeReturn=function(e){var t;return t=this.unwrapAll(),e?new o(new x(e+".push"),[t]):new M(t)},e.prototype.contains=function(e){var t;return t=void 0,this.traverseChildren(!1,function(n){return e(n)?(t=n,!1):void 0}),t},e.prototype.lastNonComment=function(e){var t;for(t=e.length;t--;)if(!(e[t]instanceof h))return e[t];return null},e.prototype.toString=function(e,t){var n;return null==e&&(e=""),null==t&&(t=this.constructor.name),n="\n"+e+t,this.soak&&(n+="?"),this.eachChild(function(t){return n+=t.toString(e+q)}),n},e.prototype.eachChild=function(e){var t,n,i,r,s,o,a,c;if(!this.children)return this;for(a=this.children,i=0,s=a.length;s>i;i++)if(t=a[i],this[t])for(c=rt([this[t]]),r=0,o=c.length;o>r;r++)if(n=c[r],e(n)===!1)return this;return this},e.prototype.traverseChildren=function(e,t){return this.eachChild(function(n){var i;return i=t(n),i!==!1?n.traverseChildren(e,t):void 0})},e.prototype.invert=function(){return new I("!",this)},e.prototype.unwrapAll=function(){var e;for(e=this;e!==(e=e.unwrap()););return e},e.prototype.children=[],e.prototype.isStatement=D,e.prototype.jumps=D,e.prototype.isComplex=Q,e.prototype.isChainable=D,e.prototype.isAssignable=D,e.prototype.unwrap=X,e.prototype.unfoldSoak=D,e.prototype.assigns=D,e.prototype.updateLocationDataIfMissing=function(e){return this.locationData?this:(this.locationData=e,this.eachChild(function(t){return t.updateLocationDataIfMissing(e)}))},e.prototype.error=function(e){return vt(e,this.locationData)},e.prototype.makeCode=function(e){return new l(this,e)},e.prototype.wrapInBraces=function(e){return[].concat(this.makeCode("("),e,this.makeCode(")"))},e.prototype.joinFragmentArrays=function(e,t){var n,i,r,s,o;for(n=[],r=s=0,o=e.length;o>s;r=++s)i=e[r],r&&n.push(this.makeCode(t)),n=n.concat(i);return 
n},e}(),e.Block=s=function(e){function t(e){this.expressions=et(rt(e||[]))}return kt(t,e),t.prototype.children=["expressions"],t.prototype.push=function(e){return this.expressions.push(e),this},t.prototype.pop=function(){return this.expressions.pop()},t.prototype.unshift=function(e){return this.expressions.unshift(e),this},t.prototype.unwrap=function(){return 1===this.expressions.length?this.expressions[0]:this},t.prototype.isEmpty=function(){return!this.expressions.length},t.prototype.isStatement=function(e){var t,n,i,r;for(r=this.expressions,n=0,i=r.length;i>n;n++)if(t=r[n],t.isStatement(e))return!0;return!1},t.prototype.jumps=function(e){var t,n,i,r,s;for(s=this.expressions,n=0,r=s.length;r>n;n++)if(t=s[n],i=t.jumps(e))return i},t.prototype.makeReturn=function(e){var t,n;for(n=this.expressions.length;n--;)if(t=this.expressions[n],!(t instanceof h)){this.expressions[n]=t.makeReturn(e),t instanceof M&&!t.expression&&this.expressions.splice(n,1);break}return this},t.prototype.compileToFragments=function(e,n){return null==e&&(e={}),e.scope?t.__super__.compileToFragments.call(this,e,n):this.compileRoot(e)},t.prototype.compileNode=function(e){var n,i,r,s,o,a,c,l,h;for(this.tab=e.indent,h=e.level===L,i=[],l=this.expressions,s=o=0,a=l.length;a>o;s=++o)c=l[s],c=c.unwrapAll(),c=c.unfoldSoak(e)||c,c instanceof t?i.push(c.compileNode(e)):h?(c.front=!0,r=c.compileToFragments(e),c.isStatement(e)||(r.unshift(this.makeCode(""+this.tab)),r.push(this.makeCode(";"))),i.push(r)):i.push(c.compileToFragments(e,F));return h?this.spaced?[].concat(this.joinFragmentArrays(i,"\n\n"),this.makeCode("\n")):this.joinFragmentArrays(i,"\n"):(n=i.length?this.joinFragmentArrays(i,", "):[this.makeCode("void 0")],i.length>1&&e.level>=F?this.wrapInBraces(n):n)},t.prototype.compileRoot=function(e){var t,n,i,r,s,o,a,c,l,u,p;for(e.indent=e.bare?"":q,e.level=L,this.spaced=!0,e.scope=new 
P(null,this,null,null!=(l=e.referencedVars)?l:[]),u=e.locals||[],r=0,s=u.length;s>r;r++)o=u[r],e.scope.parameter(o);return a=[],e.bare||(c=function(){var e,n,r,s;for(r=this.expressions,s=[],i=e=0,n=r.length;n>e&&(t=r[i],t.unwrap()instanceof h);i=++e)s.push(t);return s}.call(this),p=this.expressions.slice(c.length),this.expressions=c,c.length&&(a=this.compileNode(ht(e,{indent:""})),a.push(this.makeCode("\n"))),this.expressions=p),n=this.compileWithDeclarations(e),e.bare?n:[].concat(a,this.makeCode("(function() {\n"),n,this.makeCode("\n}).call(this);\n"))},t.prototype.compileWithDeclarations=function(e){var t,n,i,r,s,o,a,c,l,u,p,d,f,m;for(r=[],c=[],l=this.expressions,s=o=0,a=l.length;a>o&&(i=l[s],i=i.unwrap(),i instanceof h||i instanceof x);s=++o);return e=ht(e,{level:L}),s&&(d=this.expressions.splice(s,9e9),u=[this.spaced,!1],m=u[0],this.spaced=u[1],p=[this.compileNode(e),m],r=p[0],this.spaced=p[1],this.expressions=d),c=this.compileNode(e),f=e.scope,f.expressions===this&&(n=e.scope.hasDeclarations(),t=f.hasAssignments,n||t?(s&&r.push(this.makeCode("\n")),r.push(this.makeCode(this.tab+"var ")),n&&r.push(this.makeCode(f.declaredVariables().join(", "))),t&&(n&&r.push(this.makeCode(",\n"+(this.tab+q))),r.push(this.makeCode(f.assignedVariables().join(",\n"+(this.tab+q))))),r.push(this.makeCode(";\n"+(this.spaced?"\n":"")))):r.length&&c.length&&r.push(this.makeCode("\n"))),r.concat(c)},t.wrap=function(e){return 1===e.length&&e[0]instanceof t?e[0]:new t(e)},t}(r),e.Literal=x=function(e){function t(e){this.value=e}return kt(t,e),t.prototype.makeReturn=function(){return this.isStatement()?this:t.__super__.makeReturn.apply(this,arguments)},t.prototype.isAssignable=function(){return g.test(this.value)},t.prototype.isStatement=function(){var e;return"break"===(e=this.value)||"continue"===e||"debugger"===e},t.prototype.isComplex=D,t.prototype.assigns=function(e){return e===this.value},t.prototype.jumps=function(e){return"break"!==this.value||(null!=e?e.loop:void 
0)||(null!=e?e.block:void 0)?"continue"!==this.value||(null!=e?e.loop:void 0)?void 0:this:this},t.prototype.compileNode=function(e){var t,n,i;return n="this"===this.value?(null!=(i=e.scope.method)?i.bound:void 0)?e.scope.method.context:this.value:this.value.reserved?'"'+this.value+'"':this.value,t=this.isStatement()?""+this.tab+n+";":n,[this.makeCode(t)]},t.prototype.toString=function(){return' "'+this.value+'"'},t}(r),e.Undefined=function(e){function t(){return t.__super__.constructor.apply(this,arguments)}return kt(t,e),t.prototype.isAssignable=D,t.prototype.isComplex=D,t.prototype.compileNode=function(e){return[this.makeCode(e.level>=T?"(void 0)":"void 0")]},t}(r),e.Null=function(e){function t(){return t.__super__.constructor.apply(this,arguments)}return kt(t,e),t.prototype.isAssignable=D,t.prototype.isComplex=D,t.prototype.compileNode=function(){return[this.makeCode("null")]},t}(r),e.Bool=function(e){function t(e){this.val=e}return kt(t,e),t.prototype.isAssignable=D,t.prototype.isComplex=D,t.prototype.compileNode=function(){return[this.makeCode(this.val)]},t}(r),e.Return=M=function(e){function t(e){this.expression=e}return kt(t,e),t.prototype.children=["expression"],t.prototype.isStatement=Q,t.prototype.makeReturn=X,t.prototype.jumps=X,t.prototype.compileToFragments=function(e,n){var i,r;return i=null!=(r=this.expression)?r.makeReturn():void 0,!i||i instanceof t?t.__super__.compileToFragments.call(this,e,n):i.compileToFragments(e,n)},t.prototype.compileNode=function(e){var t,n,i;return t=[],n=null!=(i=this.expression)?"function"==typeof i.isYieldReturn?i.isYieldReturn():void 0:void 0,n||t.push(this.makeCode(this.tab+("return"+(this.expression?" 
":"")))),this.expression&&(t=t.concat(this.expression.compileToFragments(e,N))),n||t.push(this.makeCode(";")),t},t}(r),e.Value=z=function(e){function t(e,n,i){return!n&&e instanceof t?e:(this.base=e,this.properties=n||[],i&&(this[i]=!0),this)}return kt(t,e),t.prototype.children=["base","properties"],t.prototype.add=function(e){return this.properties=this.properties.concat(e),this},t.prototype.hasProperties=function(){return!!this.properties.length},t.prototype.bareLiteral=function(e){return!this.properties.length&&this.base instanceof e},t.prototype.isArray=function(){return this.bareLiteral(n)},t.prototype.isRange=function(){return this.bareLiteral(j)},t.prototype.isComplex=function(){return this.hasProperties()||this.base.isComplex()},t.prototype.isAssignable=function(){return this.hasProperties()||this.base.isAssignable()},t.prototype.isSimpleNumber=function(){return this.bareLiteral(x)&&B.test(this.base.value)},t.prototype.isString=function(){return this.bareLiteral(x)&&b.test(this.base.value)},t.prototype.isRegex=function(){return this.bareLiteral(x)&&v.test(this.base.value)},t.prototype.isAtomic=function(){var e,t,n,i;for(i=this.properties.concat(this.base),e=0,t=i.length;t>e;e++)if(n=i[e],n.soak||n instanceof o)return!1;return!0},t.prototype.isNotCallable=function(){return this.isSimpleNumber()||this.isString()||this.isRegex()||this.isArray()||this.isRange()||this.isSplice()||this.isObject()},t.prototype.isStatement=function(e){return!this.properties.length&&this.base.isStatement(e)},t.prototype.assigns=function(e){return!this.properties.length&&this.base.assigns(e)},t.prototype.jumps=function(e){return!this.properties.length&&this.base.jumps(e)},t.prototype.isObject=function(e){return this.properties.length?!1:this.base instanceof A&&(!e||this.base.generated)},t.prototype.isSplice=function(){var e,t;return t=this.properties,e=t[t.length-1],e instanceof U},t.prototype.looksStatic=function(e){var t;return 
this.base.value===e&&1===this.properties.length&&"prototype"!==(null!=(t=this.properties[0].name)?t.value:void 0)},t.prototype.unwrap=function(){return this.properties.length?this:this.base},t.prototype.cacheReference=function(e){var n,r,s,o,a;return a=this.properties,s=a[a.length-1],2>this.properties.length&&!this.base.isComplex()&&!(null!=s?s.isComplex():void 0)?[this,this]:(n=new t(this.base,this.properties.slice(0,-1)),n.isComplex()&&(r=new x(e.scope.freeVariable("base")),n=new t(new O(new i(r,n)))),s?(s.isComplex()&&(o=new x(e.scope.freeVariable("name")),s=new w(new i(o,s.index)),o=new w(o)),[n.add(s),new t(r||n.base,[o||s])]):[n,r])},t.prototype.compileNode=function(e){var t,n,i,r,s;for(this.base.front=this.front,s=this.properties,t=this.base.compileToFragments(e,s.length?T:null),(this.base instanceof O||s.length)&&B.test(st(t))&&t.push(this.makeCode(".")),n=0,i=s.length;i>n;n++)r=s[n],t.push.apply(t,r.compileToFragments(e));return t},t.prototype.unfoldSoak=function(e){return null!=this.unfoldedSoak?this.unfoldedSoak:this.unfoldedSoak=function(n){return function(){var r,s,o,a,c,l,h,p,d,f;if(o=n.base.unfoldSoak(e))return(p=o.body.properties).push.apply(p,n.properties),o;for(d=n.properties,s=a=0,c=d.length;c>a;s=++a)if(l=d[s],l.soak)return l.soak=!1,r=new t(n.base,n.properties.slice(0,s)),f=new t(n.base,n.properties.slice(s)),r.isComplex()&&(h=new x(e.scope.freeVariable("ref")),r=new O(new i(h,r)),f.base=h),new y(new u(r),f,{soak:!0});return!1}}(this)()},t}(r),e.Comment=h=function(e){function t(e){this.comment=e}return kt(t,e),t.prototype.isStatement=Q,t.prototype.makeReturn=X,t.prototype.compileNode=function(e,t){var n,i;return i=this.comment.replace(/^(\s*)#(?=\s)/gm,"$1 *"),n="/*"+ut(i,this.tab)+(Tt.call(i,"\n")>=0?"\n"+this.tab:"")+" */",(t||e.level)===L&&(n=e.indent+n),[this.makeCode("\n"),this.makeCode(n)]},t}(r),e.Call=o=function(e){function 
n(e,t,n){this.args=null!=t?t:[],this.soak=n,this.isNew=!1,this.isSuper="super"===e,this.variable=this.isSuper?null:e,e instanceof z&&e.isNotCallable()&&e.error("literal is not a function")}return kt(n,e),n.prototype.children=["variable","args"],n.prototype.newInstance=function(){var e,t;return e=(null!=(t=this.variable)?t.base:void 0)||this.variable,e instanceof n&&!e.isNew?e.newInstance():this.isNew=!0,this},n.prototype.superReference=function(e){var n,r,s,o,a,c,l,h;return a=e.scope.namedMethod(),(null!=a?a.klass:void 0)?(o=a.klass,c=a.name,h=a.variable,o.isComplex()&&(s=new x(e.scope.parent.freeVariable("base")),r=new z(new O(new i(s,o))),h.base=r,h.properties.splice(0,o.properties.length)),(c.isComplex()||c instanceof w&&c.index.isAssignable())&&(l=new x(e.scope.parent.freeVariable("name")),c=new w(new i(l,c.index)),h.properties.pop(),h.properties.push(c)),n=[new t(new x("__super__"))],a["static"]&&n.push(new t(new x("constructor"))),n.push(null!=l?new w(l):c),new z(null!=s?s:o,n).compile(e)):(null!=a?a.ctor:void 0)?a.name+".__super__.constructor":this.error("cannot call super outside of an instance method.")},n.prototype.superThis=function(e){var t;return t=e.scope.method,t&&!t.klass&&t.context||"this"},n.prototype.unfoldSoak=function(e){var t,i,r,s,o,a,c,l,h;if(this.soak){if(this.variable){if(i=bt(e,this,"variable"))return i;c=new z(this.variable).cacheReference(e),s=c[0],h=c[1]}else s=new x(this.superReference(e)),h=new z(s);return h=new n(h,this.args),h.isNew=this.isNew,s=new x("typeof "+s.compile(e)+' === "function"'),new y(s,new z(h),{soak:!0})}for(t=this,a=[];;)if(t.variable instanceof n)a.push(t),t=t.variable;else{if(!(t.variable instanceof z))break;if(a.push(t),!((t=t.variable.base)instanceof n))break}for(l=a.reverse(),r=0,o=l.length;o>r;r++)t=l[r],i&&(t.variable instanceof n?t.variable=i:t.variable.base=i),i=bt(e,t,"variable");return i},n.prototype.compileNode=function(e){var 
t,n,i,r,s,o,a,c,l,h;if(null!=(l=this.variable)&&(l.front=this.front),r=G.compileSplattedArray(e,this.args,!0),r.length)return this.compileSplat(e,r);for(i=[],h=this.args,n=o=0,a=h.length;a>o;n=++o)t=h[n],n&&i.push(this.makeCode(", ")),i.push.apply(i,t.compileToFragments(e,F));return s=[],this.isSuper?(c=this.superReference(e)+(".call("+this.superThis(e)),i.length&&(c+=", "),s.push(this.makeCode(c))):(this.isNew&&s.push(this.makeCode("new ")),s.push.apply(s,this.variable.compileToFragments(e,T)),s.push(this.makeCode("("))),s.push.apply(s,i),s.push(this.makeCode(")")),s},n.prototype.compileSplat=function(e,t){var n,i,r,s,o,a;return this.isSuper?[].concat(this.makeCode(this.superReference(e)+".apply("+this.superThis(e)+", "),t,this.makeCode(")")):this.isNew?(s=this.tab+q,[].concat(this.makeCode("(function(func, args, ctor) {\n"+s+"ctor.prototype = func.prototype;\n"+s+"var child = new ctor, result = func.apply(child, args);\n"+s+"return Object(result) === result ? result : child;\n"+this.tab+"})("),this.variable.compileToFragments(e,F),this.makeCode(", "),t,this.makeCode(", function(){})"))):(n=[],i=new z(this.variable),(o=i.properties.pop())&&i.isComplex()?(a=e.scope.freeVariable("ref"),n=n.concat(this.makeCode("("+a+" = "),i.compileToFragments(e,F),this.makeCode(")"),o.compileToFragments(e))):(r=i.compileToFragments(e,T),B.test(st(r))&&(r=this.wrapInBraces(r)),o?(a=st(r),r.push.apply(r,o.compileToFragments(e))):a="null",n=n.concat(r)),n=n.concat(this.makeCode(".apply("+a+", "),t,this.makeCode(")")))},n}(r),e.Extends=d=function(e){function t(e,t){this.child=e,this.parent=t}return kt(t,e),t.prototype.children=["child","parent"],t.prototype.compileToFragments=function(e){return new o(new z(new x(yt("extend",e))),[this.child,this.parent]).compileToFragments(e)},t}(r),e.Access=t=function(e){function t(e,t){this.name=e,this.name.asKey=!0,this.soak="soak"===t}return kt(t,e),t.prototype.children=["name"],t.prototype.compileToFragments=function(e){var t;return 
t=this.name.compileToFragments(e),g.test(st(t))?t.unshift(this.makeCode(".")):(t.unshift(this.makeCode("[")),t.push(this.makeCode("]"))),t},t.prototype.isComplex=D,t}(r),e.Index=w=function(e){function t(e){this.index=e}return kt(t,e),t.prototype.children=["index"],t.prototype.compileToFragments=function(e){return[].concat(this.makeCode("["),this.index.compileToFragments(e,N),this.makeCode("]"))},t.prototype.isComplex=function(){return this.index.isComplex()},t}(r),e.Range=j=function(e){function t(e,t,n){this.from=e,this.to=t,this.exclusive="exclusive"===n,this.equals=this.exclusive?"":"="}return kt(t,e),t.prototype.children=["from","to"],t.prototype.compileVariables=function(e){var t,n,i,r,s,o;return e=ht(e,{top:!0}),t=tt(e,"isComplex"),n=this.cacheToCodeFragments(this.from.cache(e,F,t)),this.fromC=n[0],this.fromVar=n[1],i=this.cacheToCodeFragments(this.to.cache(e,F,t)),this.toC=i[0],this.toVar=i[1],(o=tt(e,"step"))&&(r=this.cacheToCodeFragments(o.cache(e,F,t)),this.step=r[0],this.stepVar=r[1]),s=[this.fromVar.match(R),this.toVar.match(R)],this.fromNum=s[0],this.toNum=s[1],this.stepVar?this.stepNum=this.stepVar.match(R):void 0},t.prototype.compileNode=function(e){var t,n,i,r,s,o,a,c,l,h,u,p,d,f;return this.fromVar||this.compileVariables(e),e.index?(a=this.fromNum&&this.toNum,s=tt(e,"index"),o=tt(e,"name"),l=o&&o!==s,f=s+" = "+this.fromC,this.toC!==this.toVar&&(f+=", "+this.toC),this.step!==this.stepVar&&(f+=", "+this.step),h=[s+" <"+this.equals,s+" >"+this.equals],c=h[0],r=h[1],n=this.stepNum?pt(this.stepNum[0])>0?c+" "+this.toVar:r+" "+this.toVar:a?(u=[pt(this.fromNum[0]),pt(this.toNum[0])],i=u[0],d=u[1],u,d>=i?c+" "+d:r+" "+d):(t=this.stepVar?this.stepVar+" > 0":this.fromVar+" <= "+this.toVar,t+" ? "+c+" "+this.toVar+" : "+r+" "+this.toVar),p=this.stepVar?s+" += "+this.stepVar:a?l?d>=i?"++"+s:"--"+s:d>=i?s+"++":s+"--":l?t+" ? ++"+s+" : --"+s:t+" ? 
"+s+"++ : "+s+"--",l&&(f=o+" = "+f),l&&(p=o+" = "+p),[this.makeCode(f+"; "+n+"; "+p)]):this.compileArray(e)},t.prototype.compileArray=function(e){var t,n,i,r,s,o,a,c,l,h,u,p,d;return this.fromNum&&this.toNum&&20>=Math.abs(this.fromNum-this.toNum)?(l=function(){p=[];for(var e=h=+this.fromNum,t=+this.toNum;t>=h?t>=e:e>=t;t>=h?e++:e--)p.push(e);return p}.apply(this),this.exclusive&&l.pop(),[this.makeCode("["+l.join(", ")+"]")]):(o=this.tab+q,s=e.scope.freeVariable("i",{single:!0}),u=e.scope.freeVariable("results"),c="\n"+o+u+" = [];",this.fromNum&&this.toNum?(e.index=s,n=st(this.compileNode(e))):(d=s+" = "+this.fromC+(this.toC!==this.toVar?", "+this.toC:""),i=this.fromVar+" <= "+this.toVar,n="var "+d+"; "+i+" ? "+s+" <"+this.equals+" "+this.toVar+" : "+s+" >"+this.equals+" "+this.toVar+"; "+i+" ? "+s+"++ : "+s+"--"),a="{ "+u+".push("+s+"); }\n"+o+"return "+u+";\n"+e.indent,r=function(e){return null!=e?e.contains(at):void 0},(r(this.from)||r(this.to))&&(t=", arguments"),[this.makeCode("(function() {"+c+"\n"+o+"for ("+n+")"+a+"}).apply(this"+(null!=t?t:"")+")")])},t}(r),e.Slice=U=function(e){function t(e){this.range=e,t.__super__.constructor.call(this)}return kt(t,e),t.prototype.children=["range"],t.prototype.compileNode=function(e){var t,n,i,r,s,o,a;return s=this.range,o=s.to,i=s.from,r=i&&i.compileToFragments(e,N)||[this.makeCode("0")],o&&(t=o.compileToFragments(e,N),n=st(t),(this.range.exclusive||-1!==+n)&&(a=", "+(this.range.exclusive?n:B.test(n)?""+(+n+1):(t=o.compileToFragments(e,T),"+"+st(t)+" + 1 || 9e9")))),[this.makeCode(".slice("+st(r)+(a||"")+")")]},t}(r),e.Obj=A=function(e){function n(e,t){this.generated=null!=t?t:!1,this.objects=this.properties=e||[]}return kt(n,e),n.prototype.children=["properties"],n.prototype.compileNode=function(e){var n,r,s,o,a,c,l,u,p,d,f,m,g,v,b,y,k,w,T,C,F;if(T=this.properties,this.generated)for(l=0,g=T.length;g>l;l++)y=T[l],y instanceof z&&y.error("cannot have an implicit value in an implicit 
object");for(r=p=0,v=T.length;v>p&&(w=T[r],!((w.variable||w).base instanceof O));r=++p);for(s=T.length>r,a=e.indent+=q,m=this.lastNonComment(this.properties),n=[],s&&(k=e.scope.freeVariable("obj"),n.push(this.makeCode("(\n"+a+k+" = "))),n.push(this.makeCode("{"+(0===T.length||0===r?"}":"\n"))),o=f=0,b=T.length;b>f;o=++f)w=T[o],o===r&&(0!==o&&n.push(this.makeCode("\n"+a+"}")),n.push(this.makeCode(",\n"))),u=o===T.length-1||o===r-1?"":w===m||w instanceof h?"\n":",\n",c=w instanceof h?"":a,s&&r>o&&(c+=q),w instanceof i&&("object"!==w.context&&w.operatorToken.error("unexpected "+w.operatorToken.value),w.variable instanceof z&&w.variable.hasProperties()&&w.variable.error("invalid object key")),w instanceof z&&w["this"]&&(w=new i(w.properties[0].name,w,"object")),w instanceof h||(r>o?(w instanceof i||(w=new i(w,w,"object")),(w.variable.base||w.variable).asKey=!0):(w instanceof i?(d=w.variable,F=w.value):(C=w.base.cache(e),d=C[0],F=C[1]),w=new i(new z(new x(k),[new t(d)]),F))),c&&n.push(this.makeCode(c)),n.push.apply(n,w.compileToFragments(e,L)),u&&n.push(this.makeCode(u));return s?n.push(this.makeCode(",\n"+a+k+"\n"+this.tab+")")):0!==T.length&&n.push(this.makeCode("\n"+this.tab+"}")),this.front&&!s?this.wrapInBraces(n):n},n.prototype.assigns=function(e){var t,n,i,r;for(r=this.properties,t=0,n=r.length;n>t;t++)if(i=r[t],i.assigns(e))return!0;return!1},n}(r),e.Arr=n=function(e){function t(e){this.objects=e||[]}return kt(t,e),t.prototype.children=["objects"],t.prototype.compileNode=function(e){var t,n,i,r,s,o,a;if(!this.objects.length)return[this.makeCode("[]")];if(e.indent+=q,t=G.compileSplattedArray(e,this.objects),t.length)return t;for(t=[],n=function(){var t,n,i,r;for(i=this.objects,r=[],t=0,n=i.length;n>t;t++)a=i[t],r.push(a.compileToFragments(e,F));return r}.call(this),r=s=0,o=n.length;o>s;r=++s)i=n[r],r&&t.push(this.makeCode(", ")),t.push.apply(t,i);return 
st(t).indexOf("\n")>=0?(t.unshift(this.makeCode("[\n"+e.indent)),t.push(this.makeCode("\n"+this.tab+"]"))):(t.unshift(this.makeCode("[")),t.push(this.makeCode("]"))),t},t.prototype.assigns=function(e){var t,n,i,r;for(r=this.objects,t=0,n=r.length;n>t;t++)if(i=r[t],i.assigns(e))return!0;return!1},t}(r),e.Class=a=function(e){function n(e,t,n){this.variable=e,this.parent=t,this.body=null!=n?n:new s,this.boundFuncs=[],this.body.classBody=!0}return kt(n,e),n.prototype.children=["variable","parent","body"],n.prototype.determineName=function(){var e,n,i;return this.variable?(n=this.variable.properties,i=n[n.length-1],e=i?i instanceof t&&i.name.value:this.variable.base.value,Tt.call(V,e)>=0&&this.variable.error("class variable name may not be "+e),e&&(e=g.test(e)&&e)):null},n.prototype.setContext=function(e){return this.body.traverseChildren(!1,function(t){return t.classBody?!1:t instanceof x&&"this"===t.value?t.value=e:t instanceof c&&t.bound?t.context=e:void 0})},n.prototype.addBoundFunctions=function(e){var n,i,r,s,o;for(o=this.boundFuncs,i=0,r=o.length;r>i;i++)n=o[i],s=new z(new x("this"),[new t(n)]).compile(e),this.ctor.body.unshift(new x(s+" = "+yt("bind",e)+"("+s+", this)"))},n.prototype.addProperties=function(e,n,r){var s,o,a,l,h,u;return u=e.base.properties.slice(0),l=function(){var e;for(e=[];o=u.shift();)o instanceof i&&(a=o.variable.base,delete o.context,h=o.value,"constructor"===a.value?(this.ctor&&o.error("cannot define more than one constructor in a class"),h.bound&&o.error("cannot define a constructor as a bound function"),h instanceof c?o=this.ctor=h:(this.externalCtor=r.classScope.freeVariable("class"),o=new i(new x(this.externalCtor),h))):o.variable["this"]?h["static"]=!0:(s=a.isComplex()?new w(a):new t(a),o.variable=new z(new x(n),[new t(new x("prototype")),s]),h instanceof c&&h.bound&&(this.boundFuncs.push(a),h.bound=!1))),e.push(o);return e}.call(this),et(l)},n.prototype.walkBody=function(e,t){return this.traverseChildren(!1,function(r){return 
function(o){var a,c,l,h,u,p,d;if(a=!0,o instanceof n)return!1;if(o instanceof s){for(d=c=o.expressions,l=h=0,u=d.length;u>h;l=++h)p=d[l],p instanceof i&&p.variable.looksStatic(e)?p.value["static"]=!0:p instanceof z&&p.isObject(!0)&&(a=!1,c[l]=r.addProperties(p,e,t));o.expressions=c=rt(c)}return a&&!(o instanceof n)}}(this))},n.prototype.hoistDirectivePrologue=function(){var e,t,n;for(t=0,e=this.body.expressions;(n=e[t])&&n instanceof h||n instanceof z&&n.isString();)++t;return this.directives=e.splice(0,t)},n.prototype.ensureConstructor=function(e){return this.ctor||(this.ctor=new c,this.externalCtor?this.ctor.body.push(new x(this.externalCtor+".apply(this, arguments)")):this.parent&&this.ctor.body.push(new x(e+".__super__.constructor.apply(this, arguments)")),this.ctor.body.makeReturn(),this.body.expressions.unshift(this.ctor)),this.ctor.ctor=this.ctor.name=e,this.ctor.klass=null,this.ctor.noReturn=!0},n.prototype.compileNode=function(e){var t,n,r,a,l,h,u,p,f;return(a=this.body.jumps())&&a.error("Class bodies cannot contain pure statements"),(n=this.body.contains(at))&&n.error("Class bodies shouldn't reference arguments"),u=this.determineName()||"_Class",u.reserved&&(u="_"+u),h=new x(u),r=new c([],s.wrap([this.body])),t=[],e.classScope=r.makeScope(e.scope),this.hoistDirectivePrologue(),this.setContext(u),this.walkBody(u,e),this.ensureConstructor(u),this.addBoundFunctions(e),this.body.spaced=!0,this.body.expressions.push(h),this.parent&&(f=new x(e.classScope.freeVariable("superClass",{reserve:!1})),this.body.expressions.unshift(new d(h,f)),r.params.push(new _(f)),t.push(this.parent)),(p=this.body.expressions).unshift.apply(p,this.directives),l=new O(new o(r,t)),this.variable&&(l=new i(this.variable,l)),l.compileToFragments(e)},n}(r),e.Assign=i=function(e){function n(e,t,n,i){var 
r,s,o;this.variable=e,this.value=t,this.context=n,null==i&&(i={}),this.param=i.param,this.subpattern=i.subpattern,this.operatorToken=i.operatorToken,o=s=this.variable.unwrapAll().value,r=Tt.call(V,o)>=0,r&&"object"!==this.context&&this.variable.error('variable name may not be "'+s+'"') +}return kt(n,e),n.prototype.children=["variable","value"],n.prototype.isStatement=function(e){return(null!=e?e.level:void 0)===L&&null!=this.context&&Tt.call(this.context,"?")>=0},n.prototype.assigns=function(e){return this["object"===this.context?"value":"variable"].assigns(e)},n.prototype.unfoldSoak=function(e){return bt(e,this,"variable")},n.prototype.compileNode=function(e){var t,n,i,r,s,o,a,l,h,u,p,d,f,m;if(i=this.variable instanceof z){if(this.variable.isArray()||this.variable.isObject())return this.compilePatternMatch(e);if(this.variable.isSplice())return this.compileSplice(e);if("||="===(l=this.context)||"&&="===l||"?="===l)return this.compileConditional(e);if("**="===(h=this.context)||"//="===h||"%%="===h)return this.compileSpecialMath(e)}return this.value instanceof c&&(this.value["static"]?(this.value.klass=this.variable.base,this.value.name=this.variable.properties[0],this.value.variable=this.variable):(null!=(u=this.variable.properties)?u.length:void 0)>=2&&(p=this.variable.properties,o=p.length>=3?Ct.call(p,0,r=p.length-2):(r=0,[]),a=p[r++],s=p[r++],"prototype"===(null!=(d=a.name)?d.value:void 0)&&(this.value.klass=new z(this.variable.base,o),this.value.name=s,this.value.variable=this.variable))),this.context||(m=this.variable.unwrapAll(),m.isAssignable()||this.variable.error('"'+this.variable.compile(e)+'" cannot be assigned'),("function"==typeof m.hasProperties?m.hasProperties():void 0)||(this.param?e.scope.add(m.value,"var"):e.scope.find(m.value))),f=this.value.compileToFragments(e,F),i&&this.variable.base instanceof A&&(this.variable.front=!0),n=this.variable.compileToFragments(e,F),"object"===this.context?n.concat(this.makeCode(": "),f):(t=n.concat(this.makeCode(" 
"+(this.context||"=")+" "),f),F>=e.level?t:this.wrapInBraces(t))},n.prototype.compilePatternMatch=function(e){var i,r,s,o,a,c,l,h,u,d,f,m,v,b,y,k,T,C,N,S,D,R,A,_,O,j,M,B;if(_=e.level===L,j=this.value,y=this.variable.base.objects,!(k=y.length))return s=j.compileToFragments(e),e.level>=E?this.wrapInBraces(s):s;if(b=y[0],1===k&&b instanceof p&&b.error("Destructuring assignment has no target"),u=this.variable.isObject(),_&&1===k&&!(b instanceof G))return o=null,b instanceof n&&"object"===b.context?(C=b,N=C.variable,h=N.base,b=C.value,b instanceof n&&(o=b.value,b=b.variable)):(b instanceof n&&(o=b.value,b=b.variable),h=u?b["this"]?b.properties[0].name:b:new x(0)),i=g.test(h.unwrap().value),j=new z(j),j.properties.push(new(i?t:w)(h)),S=b.unwrap().value,Tt.call($,S)>=0&&b.error("assignment to a reserved word: "+b.compile(e)),o&&(j=new I("?",j,o)),new n(b,j,null,{param:this.param}).compileToFragments(e,L);for(M=j.compileToFragments(e,F),B=st(M),r=[],a=!1,(!g.test(B)||this.variable.assigns(B))&&(r.push([this.makeCode((T=e.scope.freeVariable("ref"))+" = ")].concat(Ct.call(M))),M=[this.makeCode(T)],B=T),l=f=0,m=y.length;m>f;l=++f){if(b=y[l],h=l,!a&&b instanceof G)v=b.name.unwrap().value,b=b.unwrap(),O=k+" <= "+B+".length ? 
"+yt("slice",e)+".call("+B+", "+l,(A=k-l-1)?(d=e.scope.freeVariable("i",{single:!0}),O+=", "+d+" = "+B+".length - "+A+") : ("+d+" = "+l+", [])"):O+=") : []",O=new x(O),a=d+"++";else{if(!a&&b instanceof p){(A=k-l-1)&&(1===A?a=B+".length - 1":(d=e.scope.freeVariable("i",{single:!0}),O=new x(d+" = "+B+".length - "+A),a=d+"++",r.push(O.compileToFragments(e,F))));continue}(b instanceof G||b instanceof p)&&b.error("multiple splats/expansions are disallowed in an assignment"),o=null,b instanceof n&&"object"===b.context?(D=b,R=D.variable,h=R.base,b=D.value,b instanceof n&&(o=b.value,b=b.variable)):(b instanceof n&&(o=b.value,b=b.variable),h=u?b["this"]?b.properties[0].name:b:new x(a||h)),v=b.unwrap().value,i=g.test(h.unwrap().value),O=new z(new x(B),[new(i?t:w)(h)]),o&&(O=new I("?",O,o))}null!=v&&Tt.call($,v)>=0&&b.error("assignment to a reserved word: "+b.compile(e)),r.push(new n(b,O,null,{param:this.param,subpattern:!0}).compileToFragments(e,F))}return _||this.subpattern||r.push(M),c=this.joinFragmentArrays(r,", "),F>e.level?c:this.wrapInBraces(c)},n.prototype.compileConditional=function(e){var t,i,r,s;return r=this.variable.cacheReference(e),i=r[0],s=r[1],!i.properties.length&&i.base instanceof x&&"this"!==i.base.value&&!e.scope.check(i.base.value)&&this.variable.error('the variable "'+i.base.value+"\" can't be assigned with "+this.context+" because it has not been declared before"),Tt.call(this.context,"?")>=0?(e.isExistentialEquals=!0,new y(new u(i),s,{type:"if"}).addElse(new n(s,this.value,"=")).compileToFragments(e)):(t=new I(this.context.slice(0,-1),i,new n(s,this.value,"=")).compileToFragments(e),F>=e.level?t:this.wrapInBraces(t))},n.prototype.compileSpecialMath=function(e){var t,i,r;return i=this.variable.cacheReference(e),t=i[0],r=i[1],new n(t,new I(this.context.slice(0,-1),r,this.value)).compileToFragments(e)},n.prototype.compileSplice=function(e){var t,n,i,r,s,o,a,c,l,h,u,p;return 
a=this.variable.properties.pop().range,i=a.from,h=a.to,n=a.exclusive,o=this.variable.compile(e),i?(c=this.cacheToCodeFragments(i.cache(e,E)),r=c[0],s=c[1]):r=s="0",h?i instanceof z&&i.isSimpleNumber()&&h instanceof z&&h.isSimpleNumber()?(h=h.compile(e)-s,n||(h+=1)):(h=h.compile(e,T)+" - "+s,n||(h+=" + 1")):h="9e9",l=this.value.cache(e,F),u=l[0],p=l[1],t=[].concat(this.makeCode("[].splice.apply("+o+", ["+r+", "+h+"].concat("),u,this.makeCode(")), "),p),e.level>L?this.wrapInBraces(t):t},n}(r),e.Code=c=function(e){function t(e,t,n){this.params=e||[],this.body=t||new s,this.bound="boundfunc"===n,this.isGenerator=!!this.body.contains(function(e){var t;return e instanceof I&&("yield"===(t=e.operator)||"yield*"===t)})}return kt(t,e),t.prototype.children=["params","body"],t.prototype.isStatement=function(){return!!this.ctor},t.prototype.jumps=D,t.prototype.makeScope=function(e){return new P(e,this.body,this)},t.prototype.compileNode=function(e){var r,a,c,l,h,u,d,f,m,g,v,b,k,w,C,F,E,N,L,S,D,R,A,O,$,j,M,B,V,P,U,G,H;if(this.bound&&(null!=(A=e.scope.method)?A.bound:void 0)&&(this.context=e.scope.method.context),this.bound&&!this.context)return this.context="_this",H=new t([new _(new x(this.context))],new s([this])),a=new o(H,[new x("this")]),a.updateLocationDataIfMissing(this.locationData),a.compileNode(e);for(e.scope=tt(e,"classScope")||this.makeScope(e.scope),e.scope.shared=tt(e,"sharedScope"),e.indent+=q,delete e.bare,delete e.isExistentialEquals,L=[],l=[],O=this.params,u=0,m=O.length;m>u;u++)N=O[u],N instanceof p||e.scope.parameter(N.asReference(e));for($=this.params,d=0,g=$.length;g>d;d++)if(N=$[d],N.splat||N instanceof p){for(j=this.params,f=0,v=j.length;v>f;f++)E=j[f],E instanceof p||!E.name.value||e.scope.add(E.name.value,"var",!0);V=new i(new z(new n(function(){var t,n,i,r;for(i=this.params,r=[],n=0,t=i.length;t>n;n++)E=i[n],r.push(E.asReference(e));return r}.call(this))),new z(new 
x("arguments")));break}for(M=this.params,F=0,b=M.length;b>F;F++)N=M[F],N.isComplex()?(U=R=N.asReference(e),N.value&&(U=new I("?",R,N.value)),l.push(new i(new z(N.name),U,"=",{param:!0}))):(R=N,N.value&&(C=new x(R.name.value+" == null"),U=new i(new z(N.name),N.value,"="),l.push(new y(C,U)))),V||L.push(R);for(G=this.body.isEmpty(),V&&l.unshift(V),l.length&&(B=this.body.expressions).unshift.apply(B,l),h=S=0,k=L.length;k>S;h=++S)E=L[h],L[h]=E.compileToFragments(e),e.scope.parameter(st(L[h]));for(P=[],this.eachParamName(function(e,t){return Tt.call(P,e)>=0&&t.error("multiple parameters named "+e),P.push(e)}),G||this.noReturn||this.body.makeReturn(),c="function",this.isGenerator&&(c+="*"),this.ctor&&(c+=" "+this.name),c+="(",r=[this.makeCode(c)],h=D=0,w=L.length;w>D;h=++D)E=L[h],h&&r.push(this.makeCode(", ")),r.push.apply(r,E);return r.push(this.makeCode(") {")),this.body.isEmpty()||(r=r.concat(this.makeCode("\n"),this.body.compileWithDeclarations(e),this.makeCode("\n"+this.tab))),r.push(this.makeCode("}")),this.ctor?[this.makeCode(this.tab)].concat(Ct.call(r)):this.front||e.level>=T?this.wrapInBraces(r):r},t.prototype.eachParamName=function(e){var t,n,i,r,s;for(r=this.params,s=[],t=0,n=r.length;n>t;t++)i=r[t],s.push(i.eachName(e));return s},t.prototype.traverseChildren=function(e,n){return e?t.__super__.traverseChildren.call(this,e,n):void 0},t}(r),e.Param=_=function(e){function t(e,t,n){var i,r,s;this.name=e,this.value=t,this.splat=n,r=i=this.name.unwrapAll().value,Tt.call(V,r)>=0&&this.name.error('parameter name "'+i+'" is not allowed'),this.name instanceof A&&this.name.generated&&(s=this.name.objects[0].operatorToken,s.error("unexpected "+s.value))}return kt(t,e),t.prototype.children=["name","value"],t.prototype.compileToFragments=function(e){return this.name.compileToFragments(e,F)},t.prototype.asReference=function(e){var t,n;return this.reference?this.reference:(n=this.name,n["this"]?(t=n.properties[0].name.value,t.reserved&&(t="_"+t),n=new 
x(e.scope.freeVariable(t))):n.isComplex()&&(n=new x(e.scope.freeVariable("arg"))),n=new z(n),this.splat&&(n=new G(n)),n.updateLocationDataIfMissing(this.locationData),this.reference=n)},t.prototype.isComplex=function(){return this.name.isComplex()},t.prototype.eachName=function(e,t){var n,r,s,o,a,c;if(null==t&&(t=this.name),n=function(t){return e("@"+t.properties[0].name.value,t)},t instanceof x)return e(t.value,t);if(t instanceof z)return n(t);for(c=t.objects,r=0,s=c.length;s>r;r++)a=c[r],a instanceof i&&null==a.context&&(a=a.variable),a instanceof i?this.eachName(e,a.value.unwrap()):a instanceof G?(o=a.name.unwrap(),e(o.value,o)):a instanceof z?a.isArray()||a.isObject()?this.eachName(e,a.base):a["this"]?n(a):e(a.base.value,a.base):a instanceof p||a.error("illegal parameter "+a.compile())},t}(r),e.Splat=G=function(e){function t(e){this.name=e.compile?e:new x(e)}return kt(t,e),t.prototype.children=["name"],t.prototype.isAssignable=Q,t.prototype.assigns=function(e){return this.name.assigns(e)},t.prototype.compileToFragments=function(e){return this.name.compileToFragments(e)},t.prototype.unwrap=function(){return this.name},t.compileSplattedArray=function(e,n,i){var r,s,o,a,c,l,h,u,p,d,f;for(h=-1;(f=n[++h])&&!(f instanceof t););if(h>=n.length)return[];if(1===n.length)return f=n[0],c=f.compileToFragments(e,F),i?c:[].concat(f.makeCode(yt("slice",e)+".call("),c,f.makeCode(")"));for(r=n.slice(h),l=u=0,d=r.length;d>u;l=++u)f=r[l],o=f.compileToFragments(e,F),r[l]=f instanceof t?[].concat(f.makeCode(yt("slice",e)+".call("),o,f.makeCode(")")):[].concat(f.makeCode("["),o,f.makeCode("]"));return 0===h?(f=n[0],a=f.joinFragmentArrays(r.slice(1),", "),r[0].concat(f.makeCode(".concat("),a,f.makeCode(")"))):(s=function(){var t,i,r,s;for(r=n.slice(0,h),s=[],t=0,i=r.length;i>t;t++)f=r[t],s.push(f.compileToFragments(e,F));return s}(),s=n[0].joinFragmentArrays(s,", "),a=n[h].joinFragmentArrays(r,", 
"),p=n[n.length-1],[].concat(n[0].makeCode("["),s,n[h].makeCode("].concat("),a,p.makeCode(")")))},t}(r),e.Expansion=p=function(e){function t(){return t.__super__.constructor.apply(this,arguments)}return kt(t,e),t.prototype.isComplex=D,t.prototype.compileNode=function(){return this.error("Expansion must be used inside a destructuring assignment or parameter list")},t.prototype.asReference=function(){return this},t.prototype.eachName=function(){},t}(r),e.While=J=function(e){function t(e,t){this.condition=(null!=t?t.invert:void 0)?e.invert():e,this.guard=null!=t?t.guard:void 0}return kt(t,e),t.prototype.children=["condition","guard","body"],t.prototype.isStatement=Q,t.prototype.makeReturn=function(e){return e?t.__super__.makeReturn.apply(this,arguments):(this.returns=!this.jumps({loop:!0}),this)},t.prototype.addBody=function(e){return this.body=e,this},t.prototype.jumps=function(){var e,t,n,i,r;if(e=this.body.expressions,!e.length)return!1;for(t=0,i=e.length;i>t;t++)if(r=e[t],n=r.jumps({loop:!0}))return n;return!1},t.prototype.compileNode=function(e){var t,n,i,r;return e.indent+=q,r="",n=this.body,n.isEmpty()?n=this.makeCode(""):(this.returns&&(n.makeReturn(i=e.scope.freeVariable("results")),r=""+this.tab+i+" = [];\n"),this.guard&&(n.expressions.length>1?n.expressions.unshift(new y(new O(this.guard).invert(),new x("continue"))):this.guard&&(n=s.wrap([new y(this.guard,n)]))),n=[].concat(this.makeCode("\n"),n.compileToFragments(e,L),this.makeCode("\n"+this.tab))),t=[].concat(this.makeCode(r+this.tab+"while ("),this.condition.compileToFragments(e,N),this.makeCode(") {"),n,this.makeCode("}")),this.returns&&t.push(this.makeCode("\n"+this.tab+"return "+i+";")),t},t}(r),e.Op=I=function(e){function n(e,t,n,i){if("in"===e)return new k(t,n);if("do"===e)return this.generateDo(t);if("new"===e){if(t instanceof o&&!t["do"]&&!t.isNew)return t.newInstance();(t instanceof c&&t.bound||t["do"])&&(t=new O(t))}return this.operator=r[e]||e,this.first=t,this.second=n,this.flip=!!i,this}var 
r,s;return kt(n,e),r={"==":"===","!=":"!==",of:"in",yieldfrom:"yield*"},s={"!==":"===","===":"!=="},n.prototype.children=["first","second"],n.prototype.isSimpleNumber=D,n.prototype.isYield=function(){var e;return"yield"===(e=this.operator)||"yield*"===e},n.prototype.isYieldReturn=function(){return this.isYield()&&this.first instanceof M},n.prototype.isUnary=function(){return!this.second},n.prototype.isComplex=function(){var e;return!(this.isUnary()&&("+"===(e=this.operator)||"-"===e)&&this.first instanceof z&&this.first.isSimpleNumber())},n.prototype.isChainable=function(){var e;return"<"===(e=this.operator)||">"===e||">="===e||"<="===e||"==="===e||"!=="===e},n.prototype.invert=function(){var e,t,i,r,o;if(this.isChainable()&&this.first.isChainable()){for(e=!0,t=this;t&&t.operator;)e&&(e=t.operator in s),t=t.first;if(!e)return new O(this).invert();for(t=this;t&&t.operator;)t.invert=!t.invert,t.operator=s[t.operator],t=t.first;return this}return(r=s[this.operator])?(this.operator=r,this.first.unwrap()instanceof n&&this.first.invert(),this):this.second?new O(this).invert():"!"===this.operator&&(i=this.first.unwrap())instanceof n&&("!"===(o=i.operator)||"in"===o||"instanceof"===o)?i:new n("!",this)},n.prototype.unfoldSoak=function(e){var t;return("++"===(t=this.operator)||"--"===t||"delete"===t)&&bt(e,this,"first")},n.prototype.generateDo=function(e){var t,n,r,s,a,l,h,u;for(l=[],n=e instanceof i&&(h=e.value.unwrap())instanceof c?h:e,u=n.params||[],r=0,s=u.length;s>r;r++)a=u[r],a.value?(l.push(a.value),delete a.value):l.push(a);return t=new o(e,l),t["do"]=!0,t},n.prototype.compileNode=function(e){var t,n,i,r,s,o;if(n=this.isChainable()&&this.first.isChainable(),n||(this.first.front=this.front),"delete"===this.operator&&e.scope.check(this.first.unwrapAll().value)&&this.error("delete operand may not be argument or var"),("--"===(r=this.operator)||"++"===r)&&(s=this.first.unwrapAll().value,Tt.call(V,s)>=0)&&this.error('cannot increment/decrement 
"'+this.first.unwrapAll().value+'"'),this.isYield())return this.compileYield(e);if(this.isUnary())return this.compileUnary(e);if(n)return this.compileChain(e);switch(this.operator){case"?":return this.compileExistence(e);case"**":return this.compilePower(e);case"//":return this.compileFloorDivision(e);case"%%":return this.compileModulo(e);default:return i=this.first.compileToFragments(e,E),o=this.second.compileToFragments(e,E),t=[].concat(i,this.makeCode(" "+this.operator+" "),o),E>=e.level?t:this.wrapInBraces(t)}},n.prototype.compileChain=function(e){var t,n,i,r;return i=this.first.second.cache(e),this.first.second=i[0],r=i[1],n=this.first.compileToFragments(e,E),t=n.concat(this.makeCode(" "+(this.invert?"&&":"||")+" "),r.compileToFragments(e),this.makeCode(" "+this.operator+" "),this.second.compileToFragments(e,E)),this.wrapInBraces(t)},n.prototype.compileExistence=function(e){var t,n;return this.first.isComplex()?(n=new x(e.scope.freeVariable("ref")),t=new O(new i(n,this.first))):(t=this.first,n=t),new y(new u(t),n,{type:"if"}).addElse(this.second).compileToFragments(e)},n.prototype.compileUnary=function(e){var t,i,r;return i=[],t=this.operator,i.push([this.makeCode(t)]),"!"===t&&this.first instanceof u?(this.first.negated=!this.first.negated,this.first.compileToFragments(e)):e.level>=T?new O(this).compileToFragments(e):(r="+"===t||"-"===t,("new"===t||"typeof"===t||"delete"===t||r&&this.first instanceof n&&this.first.operator===t)&&i.push([this.makeCode(" ")]),(r&&this.first instanceof n||"new"===t&&this.first.isStatement(e))&&(this.first=new O(this.first)),i.push(this.first.compileToFragments(e,E)),this.flip&&i.reverse(),this.joinFragmentArrays(i,""))},n.prototype.compileYield=function(e){var t,n;return n=[],t=this.operator,null==e.scope.parent&&this.error("yield statements must occur within a function generator."),Tt.call(Object.keys(this.first),"expression")>=0&&!(this.first instanceof 
W)?this.isYieldReturn()?n.push(this.first.compileToFragments(e,L)):null!=this.first.expression&&n.push(this.first.expression.compileToFragments(e,E)):(n.push([this.makeCode("("+t+" ")]),n.push(this.first.compileToFragments(e,E)),n.push([this.makeCode(")")])),this.joinFragmentArrays(n,"")},n.prototype.compilePower=function(e){var n;return n=new z(new x("Math"),[new t(new x("pow"))]),new o(n,[this.first,this.second]).compileToFragments(e)},n.prototype.compileFloorDivision=function(e){var i,r;return r=new z(new x("Math"),[new t(new x("floor"))]),i=new n("/",this.first,this.second),new o(r,[i]).compileToFragments(e)},n.prototype.compileModulo=function(e){var t;return t=new z(new x(yt("modulo",e))),new o(t,[this.first,this.second]).compileToFragments(e)},n.prototype.toString=function(e){return n.__super__.toString.call(this,e,this.constructor.name+" "+this.operator)},n}(r),e.In=k=function(e){function t(e,t){this.object=e,this.array=t}return kt(t,e),t.prototype.children=["object","array"],t.prototype.invert=S,t.prototype.compileNode=function(e){var t,n,i,r,s;if(this.array instanceof z&&this.array.isArray()&&this.array.base.objects.length){for(s=this.array.base.objects,n=0,i=s.length;i>n;n++)if(r=s[n],r instanceof G){t=!0;break}if(!t)return this.compileOrTest(e)}return this.compileLoopTest(e)},t.prototype.compileOrTest=function(e){var t,n,i,r,s,o,a,c,l,h,u,p;for(c=this.object.cache(e,E),u=c[0],a=c[1],l=this.negated?[" !== "," && "]:[" === "," || "],t=l[0],n=l[1],p=[],h=this.array.base.objects,i=s=0,o=h.length;o>s;i=++s)r=h[i],i&&p.push(this.makeCode(n)),p=p.concat(i?a:u,this.makeCode(t),r.compileToFragments(e,T));return E>e.level?p:this.wrapInBraces(p)},t.prototype.compileLoopTest=function(e){var t,n,i,r;return i=this.object.cache(e,F),r=i[0],n=i[1],t=[].concat(this.makeCode(yt("indexOf",e)+".call("),this.array.compileToFragments(e,F),this.makeCode(", "),n,this.makeCode(") "+(this.negated?"< 0":">= 0"))),st(r)===st(n)?t:(t=r.concat(this.makeCode(", 
"),t),F>e.level?t:this.wrapInBraces(t))},t.prototype.toString=function(e){return t.__super__.toString.call(this,e,this.constructor.name+(this.negated?"!":""))},t}(r),e.Try=Y=function(e){function t(e,t,n,i){this.attempt=e,this.errorVariable=t,this.recovery=n,this.ensure=i}return kt(t,e),t.prototype.children=["attempt","recovery","ensure"],t.prototype.isStatement=Q,t.prototype.jumps=function(e){var t;return this.attempt.jumps(e)||(null!=(t=this.recovery)?t.jumps(e):void 0)},t.prototype.makeReturn=function(e){return this.attempt&&(this.attempt=this.attempt.makeReturn(e)),this.recovery&&(this.recovery=this.recovery.makeReturn(e)),this},t.prototype.compileNode=function(e){var t,n,r,s,o;return e.indent+=q,o=this.attempt.compileToFragments(e,L),t=this.recovery?(r=e.scope.freeVariable("error"),s=new x(r),this.errorVariable?this.recovery.unshift(new i(this.errorVariable,s)):void 0,[].concat(this.makeCode(" catch ("),s.compileToFragments(e),this.makeCode(") {\n"),this.recovery.compileToFragments(e,L),this.makeCode("\n"+this.tab+"}"))):this.ensure||this.recovery?[]:[this.makeCode(" catch ("+r+") {}")],n=this.ensure?[].concat(this.makeCode(" finally {\n"),this.ensure.compileToFragments(e,L),this.makeCode("\n"+this.tab+"}")):[],[].concat(this.makeCode(this.tab+"try {\n"),o,this.makeCode("\n"+this.tab+"}"),t,n)},t}(r),e.Throw=W=function(e){function t(e){this.expression=e}return kt(t,e),t.prototype.children=["expression"],t.prototype.isStatement=Q,t.prototype.jumps=D,t.prototype.makeReturn=X,t.prototype.compileNode=function(e){return[].concat(this.makeCode(this.tab+"throw "),this.expression.compileToFragments(e),this.makeCode(";"))},t}(r),e.Existence=u=function(e){function t(e){this.expression=e}return kt(t,e),t.prototype.children=["expression"],t.prototype.invert=S,t.prototype.compileNode=function(e){var t,n,i,r;return this.expression.front=this.front,i=this.expression.compile(e,E),g.test(i)&&!e.scope.check(i)?(r=this.negated?["===","||"]:["!==","&&"],t=r[0],n=r[1],i="typeof 
"+i+" "+t+' "undefined" '+n+" "+i+" "+t+" null"):i=i+" "+(this.negated?"==":"!=")+" null",[this.makeCode(C>=e.level?i:"("+i+")")]},t}(r),e.Parens=O=function(e){function t(e){this.body=e}return kt(t,e),t.prototype.children=["body"],t.prototype.unwrap=function(){return this.body},t.prototype.isComplex=function(){return this.body.isComplex()},t.prototype.compileNode=function(e){var t,n,i;return n=this.body.unwrap(),n instanceof z&&n.isAtomic()?(n.front=this.front,n.compileToFragments(e)):(i=n.compileToFragments(e,N),t=E>e.level&&(n instanceof I||n instanceof o||n instanceof f&&n.returns),t?i:this.wrapInBraces(i))},t}(r),e.For=f=function(e){function t(e,t){var n;this.source=t.source,this.guard=t.guard,this.step=t.step,this.name=t.name,this.index=t.index,this.body=s.wrap([e]),this.own=!!t.own,this.object=!!t.object,this.object&&(n=[this.index,this.name],this.name=n[0],this.index=n[1]),this.index instanceof z&&this.index.error("index cannot be a pattern matching expression"),this.range=this.source instanceof z&&this.source.base instanceof j&&!this.source.properties.length,this.pattern=this.name instanceof z,this.range&&this.index&&this.index.error("indexes do not apply to range loops"),this.range&&this.pattern&&this.name.error("cannot pattern match over range loops"),this.own&&!this.object&&this.name.error("cannot use own with for-in"),this.returns=!1}return kt(t,e),t.prototype.children=["body","source","guard","step"],t.prototype.compileNode=function(e){var t,n,r,o,a,c,l,h,u,p,d,f,m,v,b,k,w,T,C,E,N,S,D,A,I,_,$,j,B,V,P,U,G,H;return t=s.wrap([this.body]),D=t.expressions,T=D[D.length-1],(null!=T?T.jumps():void 0)instanceof M&&(this.returns=!1),B=this.range?this.source.base:this.source,j=e.scope,this.pattern||(E=this.name&&this.name.compile(e,F)),v=this.index&&this.index.compile(e,F),E&&!this.pattern&&j.find(E),v&&j.find(v),this.returns&&($=j.freeVariable("results")),b=this.object&&v||j.freeVariable("i",{single:!0}),k=this.range&&E||v||b,w=k!==b?k+" = 
":"",this.step&&!this.range&&(A=this.cacheToCodeFragments(this.step.cache(e,F,ot)),V=A[0],U=A[1],P=U.match(R)),this.pattern&&(E=b),H="",d="",l="",f=this.tab+q,this.range?p=B.compileToFragments(ht(e,{index:b,name:E,step:this.step,isComplex:ot})):(G=this.source.compile(e,F),!E&&!this.own||g.test(G)||(l+=""+this.tab+(S=j.freeVariable("ref"))+" = "+G+";\n",G=S),E&&!this.pattern&&(N=E+" = "+G+"["+k+"]"),this.object||(V!==U&&(l+=""+this.tab+V+";\n"),this.step&&P&&(u=0>pt(P[0]))||(C=j.freeVariable("len")),a=""+w+b+" = 0, "+C+" = "+G+".length",c=""+w+b+" = "+G+".length - 1",r=b+" < "+C,o=b+" >= 0",this.step?(P?u&&(r=o,a=c):(r=U+" > 0 ? "+r+" : "+o,a="("+U+" > 0 ? ("+a+") : "+c+")"),m=b+" += "+U):m=""+(k!==b?"++"+b:b+"++"),p=[this.makeCode(a+"; "+r+"; "+w+m)])),this.returns&&(I=""+this.tab+$+" = [];\n",_="\n"+this.tab+"return "+$+";",t.makeReturn($)),this.guard&&(t.expressions.length>1?t.expressions.unshift(new y(new O(this.guard).invert(),new x("continue"))):this.guard&&(t=s.wrap([new y(this.guard,t)]))),this.pattern&&t.expressions.unshift(new i(this.name,new x(G+"["+k+"]"))),h=[].concat(this.makeCode(l),this.pluckDirectCall(e,t)),N&&(H="\n"+f+N+";"),this.object&&(p=[this.makeCode(k+" in "+G)],this.own&&(d="\n"+f+"if (!"+yt("hasProp",e)+".call("+G+", "+k+")) continue;")),n=t.compileToFragments(ht(e,{indent:f}),L),n&&n.length>0&&(n=[].concat(this.makeCode("\n"),n,this.makeCode("\n"))),[].concat(h,this.makeCode(""+(I||"")+this.tab+"for ("),p,this.makeCode(") {"+d+H),n,this.makeCode(this.tab+"}"+(_||"")))},t.prototype.pluckDirectCall=function(e,t){var n,r,s,a,l,h,u,p,d,f,m,g,v,b,y,k;for(r=[],d=t.expressions,l=h=0,u=d.length;u>h;l=++h)s=d[l],s=s.unwrapAll(),s instanceof o&&(k=null!=(f=s.variable)?f.unwrapAll():void 0,(k instanceof c||k instanceof z&&(null!=(m=k.base)?m.unwrapAll():void 0)instanceof c&&1===k.properties.length&&("call"===(g=null!=(v=k.properties[0].name)?v.value:void 0)||"apply"===g))&&(a=(null!=(b=k.base)?b.unwrapAll():void 0)||k,p=new 
x(e.scope.freeVariable("fn")),n=new z(p),k.base&&(y=[n,k],k.base=y[0],n=y[1]),t.expressions[l]=new o(n,s.args),r=r.concat(this.makeCode(this.tab),new i(p,a).compileToFragments(e,L),this.makeCode(";\n"))));return r},t}(J),e.Switch=H=function(e){function t(e,t,n){this.subject=e,this.cases=t,this.otherwise=n}return kt(t,e),t.prototype.children=["subject","cases","otherwise"],t.prototype.isStatement=Q,t.prototype.jumps=function(e){var t,n,i,r,s,o,a,c;for(null==e&&(e={block:!0}),o=this.cases,i=0,s=o.length;s>i;i++)if(a=o[i],n=a[0],t=a[1],r=t.jumps(e))return r;return null!=(c=this.otherwise)?c.jumps(e):void 0},t.prototype.makeReturn=function(e){var t,n,i,r,o;for(r=this.cases,t=0,n=r.length;n>t;t++)i=r[t],i[1].makeReturn(e);return e&&(this.otherwise||(this.otherwise=new s([new x("void 0")]))),null!=(o=this.otherwise)&&o.makeReturn(e),this},t.prototype.compileNode=function(e){var t,n,i,r,s,o,a,c,l,h,u,p,d,f,m,g;for(c=e.indent+q,l=e.indent=c+q,o=[].concat(this.makeCode(this.tab+"switch ("),this.subject?this.subject.compileToFragments(e,N):this.makeCode("false"),this.makeCode(") {\n")),f=this.cases,a=h=0,p=f.length;p>h;a=++h){for(m=f[a],r=m[0],t=m[1],g=rt([r]),u=0,d=g.length;d>u;u++)i=g[u],this.subject||(i=i.invert()),o=o.concat(this.makeCode(c+"case "),i.compileToFragments(e,N),this.makeCode(":\n"));if((n=t.compileToFragments(e,L)).length>0&&(o=o.concat(n,this.makeCode("\n"))),a===this.cases.length-1&&!this.otherwise)break;s=this.lastNonComment(t.expressions),s instanceof M||s instanceof x&&s.jumps()&&"debugger"!==s.value||o.push(i.makeCode(l+"break;\n"))}return this.otherwise&&this.otherwise.expressions.length&&o.push.apply(o,[this.makeCode(c+"default:\n")].concat(Ct.call(this.otherwise.compileToFragments(e,L)),[this.makeCode("\n")])),o.push(this.makeCode(this.tab+"}")),o},t}(r),e.If=y=function(e){function t(e,t,n){this.body=t,null==n&&(n={}),this.condition="unless"===n.type?e.invert():e,this.elseBody=null,this.isChain=!1,this.soak=n.soak}return 
kt(t,e),t.prototype.children=["condition","body","elseBody"],t.prototype.bodyNode=function(){var e;return null!=(e=this.body)?e.unwrap():void 0},t.prototype.elseBodyNode=function(){var e;return null!=(e=this.elseBody)?e.unwrap():void 0},t.prototype.addElse=function(e){return this.isChain?this.elseBodyNode().addElse(e):(this.isChain=e instanceof t,this.elseBody=this.ensureBlock(e),this.elseBody.updateLocationDataIfMissing(e.locationData)),this},t.prototype.isStatement=function(e){var t;return(null!=e?e.level:void 0)===L||this.bodyNode().isStatement(e)||(null!=(t=this.elseBodyNode())?t.isStatement(e):void 0)},t.prototype.jumps=function(e){var t;return this.body.jumps(e)||(null!=(t=this.elseBody)?t.jumps(e):void 0)},t.prototype.compileNode=function(e){return this.isStatement(e)?this.compileStatement(e):this.compileExpression(e)},t.prototype.makeReturn=function(e){return e&&(this.elseBody||(this.elseBody=new s([new x("void 0")]))),this.body&&(this.body=new s([this.body.makeReturn(e)])),this.elseBody&&(this.elseBody=new s([this.elseBody.makeReturn(e)])),this},t.prototype.ensureBlock=function(e){return e instanceof s?e:new s([e])},t.prototype.compileStatement=function(e){var n,i,r,s,o,a,c;return r=tt(e,"chainChild"),(o=tt(e,"isExistentialEquals"))?new t(this.condition.invert(),this.elseBodyNode(),{type:"if"}).compileToFragments(e):(c=e.indent+q,s=this.condition.compileToFragments(e,N),i=this.ensureBlock(this.body).compileToFragments(ht(e,{indent:c})),a=[].concat(this.makeCode("if ("),s,this.makeCode(") {\n"),i,this.makeCode("\n"+this.tab+"}")),r||a.unshift(this.makeCode(this.tab)),this.elseBody?(n=a.concat(this.makeCode(" else ")),this.isChain?(e.chainChild=!0,n=n.concat(this.elseBody.unwrap().compileToFragments(e,L))):n=n.concat(this.makeCode("{\n"),this.elseBody.compileToFragments(ht(e,{indent:c}),L),this.makeCode("\n"+this.tab+"}")),n):a)},t.prototype.compileExpression=function(e){var t,n,i,r;return 
i=this.condition.compileToFragments(e,C),n=this.bodyNode().compileToFragments(e,F),t=this.elseBodyNode()?this.elseBodyNode().compileToFragments(e,F):[this.makeCode("void 0")],r=i.concat(this.makeCode(" ? "),n,this.makeCode(" : "),t),e.level>=C?this.wrapInBraces(r):r},t.prototype.unfoldSoak=function(){return this.soak&&this},t}(r),K={extend:function(e){return"function(child, parent) { for (var key in parent) { if ("+yt("hasProp",e)+".call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }"},bind:function(){return"function(fn, me){ return function(){ return fn.apply(me, arguments); }; }"},indexOf:function(){return"[].indexOf || function(item) { for (var i = 0, l = this.length; i < l; i++) { if (i in this && this[i] === item) return i; } return -1; }"},modulo:function(){return"function(a, b) { return (+a % (b = +b) + b) % b; }"},hasProp:function(){return"{}.hasOwnProperty"},slice:function(){return"[].slice"}},L=1,N=2,F=3,C=4,E=5,T=6,q=" ",g=/^(?!\d)[$\w\x7f-\uffff]+$/,B=/^[+-]?\d+$/,m=/^[+-]?0x[\da-f]+/i,R=/^[+-]?(?:0x[\da-f]+|\d*\.?\d+(?:e[+-]?\d+)?)$/i,b=/^['"]/,v=/^\//,yt=function(e,t){var n,i;return i=t.scope.root,e in i.utilities?i.utilities[e]:(n=i.freeVariable(e),i.assign(n,K[e](t)),i.utilities[e]=n)},ut=function(e,t){return e=e.replace(/\n/g,"$&"+t),e.replace(/\s+$/,"")},pt=function(e){return null==e?0:e.match(m)?parseInt(e,16):parseFloat(e)},at=function(e){return e instanceof x&&"arguments"===e.value&&!e.asKey},ct=function(e){return e instanceof x&&"this"===e.value&&!e.asKey||e instanceof c&&e.bound||e instanceof o&&e.isSuper},ot=function(e){return e.isComplex()||("function"==typeof e.isAssignable?e.isAssignable():void 0)},bt=function(e,t,n){var i;if(i=t[n].unfoldSoak(e))return t[n]=i.body,i.body=new z(t),i}}.call(this),t.exports}(),require["./sourcemap"]=function(){var e={},t={exports:e};return 
function(){var e,n;e=function(){function e(e){this.line=e,this.columns=[]}return e.prototype.add=function(e,t,n){var i,r;return r=t[0],i=t[1],null==n&&(n={}),this.columns[e]&&n.noReplace?void 0:this.columns[e]={line:this.line,column:e,sourceLine:r,sourceColumn:i}},e.prototype.sourceLocation=function(e){for(var t;!((t=this.columns[e])||0>=e);)e--;return t&&[t.sourceLine,t.sourceColumn]},e}(),n=function(){function t(){this.lines=[]}var n,i,r,s;return t.prototype.add=function(t,n,i){var r,s,o,a;return null==i&&(i={}),o=n[0],s=n[1],a=(r=this.lines)[o]||(r[o]=new e(o)),a.add(s,t,i)},t.prototype.sourceLocation=function(e){var t,n,i;for(n=e[0],t=e[1];!((i=this.lines[n])||0>=n);)n--;return i&&i.sourceLocation(t)},t.prototype.generate=function(e,t){var n,i,r,s,o,a,c,l,h,u,p,d,f,m,g,v;for(null==e&&(e={}),null==t&&(t=null),v=0,s=0,a=0,o=0,d=!1,n="",f=this.lines,u=i=0,c=f.length;c>i;u=++i)if(h=f[u])for(m=h.columns,r=0,l=m.length;l>r;r++)if(p=m[r]){for(;p.line>v;)s=0,d=!1,n+=";",v++;d&&(n+=",",d=!1),n+=this.encodeVlq(p.column-s),s=p.column,n+=this.encodeVlq(0),n+=this.encodeVlq(p.sourceLine-a),a=p.sourceLine,n+=this.encodeVlq(p.sourceColumn-o),o=p.sourceColumn,d=!0}return g={version:3,file:e.generatedFile||"",sourceRoot:e.sourceRoot||"",sources:e.sourceFiles||[""],names:[],mappings:n},e.inline&&(g.sourcesContent=[t]),JSON.stringify(g,null,2)},r=5,i=1<e?1:0,a=(Math.abs(e)<<1)+o;a||!t;)n=a&s,a>>=r,a&&(n|=i),t+=this.encodeBase64(n);return t},n="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",t.prototype.encodeBase64=function(e){return n[e]||function(){throw Error("Cannot Base64 encode value: "+e) +}()},t}(),t.exports=n}.call(this),t.exports}(),require["./coffee-script"]=function(){var e={},t={exports:e};return function(){var t,n,i,r,s,o,a,c,l,h,u,p,d,f,m,g,v,b,y={}.hasOwnProperty,k=[].indexOf||function(e){for(var t=0,n=this.length;n>t;t++)if(t in this&&this[t]===e)return 
t;return-1};if(a=require("fs"),v=require("vm"),f=require("path"),t=require("./lexer").Lexer,d=require("./parser").parser,l=require("./helpers"),n=require("./sourcemap"),e.VERSION="1.10.0",e.FILE_EXTENSIONS=[".coffee",".litcoffee",".coffee.md"],e.helpers=l,b=function(e){return function(t,n){var i,r;null==n&&(n={});try{return e.call(this,t,n)}catch(r){if(i=r,"string"!=typeof t)throw i;throw l.updateSyntaxError(i,t,n.filename)}}},e.compile=r=b(function(e,t){var i,r,s,o,a,c,h,u,f,m,g,v,b,y,k;for(v=l.merge,o=l.extend,t=o({},t),t.sourceMap&&(g=new n),k=p.tokenize(e,t),t.referencedVars=function(){var e,t,n;for(n=[],e=0,t=k.length;t>e;e++)y=k[e],y.variable&&n.push(y[1]);return n}(),c=d.parse(k).compileToFragments(t),s=0,t.header&&(s+=1),t.shiftLine&&(s+=1),r=0,f="",u=0,m=c.length;m>u;u++)a=c[u],t.sourceMap&&(a.locationData&&!/^[;\s]*$/.test(a.code)&&g.add([a.locationData.first_line,a.locationData.first_column],[s,r],{noReplace:!0}),b=l.count(a.code,"\n"),s+=b,b?r=a.code.length-(a.code.lastIndexOf("\n")+1):r+=a.code.length),f+=a.code;return t.header&&(h="Generated by CoffeeScript "+this.VERSION,f="// "+h+"\n"+f),t.sourceMap?(i={js:f},i.sourceMap=g,i.v3SourceMap=g.generate(t,e),i):f}),e.tokens=b(function(e,t){return p.tokenize(e,t)}),e.nodes=b(function(e,t){return"string"==typeof e?d.parse(p.tokenize(e,t)):d.parse(e)}),e.run=function(e,t){var n,i,s,o;return null==t&&(t={}),s=require.main,s.filename=process.argv[1]=t.filename?a.realpathSync(t.filename):".",s.moduleCache&&(s.moduleCache={}),i=t.filename?f.dirname(a.realpathSync(t.filename)):a.realpathSync("."),s.paths=require("module")._nodeModulePaths(i),(!l.isCoffee(s.filename)||require.extensions)&&(n=r(e,t),e=null!=(o=n.js)?o:n),s._compile(e,s.filename)},e.eval=function(e,t){var n,i,s,o,a,c,l,h,u,p,d,m,g,b,k,w,T;if(null==t&&(t={}),e=e.trim()){if(o=null!=(m=v.Script.createContext)?m:v.createContext,c=null!=(g=v.isContext)?g:function(){return t.sandbox instanceof 
o().constructor},o){if(null!=t.sandbox){if(c(t.sandbox))w=t.sandbox;else{w=o(),b=t.sandbox;for(h in b)y.call(b,h)&&(T=b[h],w[h]=T)}w.global=w.root=w.GLOBAL=w}else w=global;if(w.__filename=t.filename||"eval",w.__dirname=f.dirname(w.__filename),w===global&&!w.module&&!w.require){for(n=require("module"),w.module=i=new n(t.modulename||"eval"),w.require=s=function(e){return n._load(e,i,!0)},i.filename=w.__filename,k=Object.getOwnPropertyNames(require),a=0,u=k.length;u>a;a++)d=k[a],"paths"!==d&&"arguments"!==d&&"caller"!==d&&(s[d]=require[d]);s.paths=i.paths=n._nodeModulePaths(process.cwd()),s.resolve=function(e){return n._resolveFilename(e,i)}}}p={};for(h in t)y.call(t,h)&&(T=t[h],p[h]=T);return p.bare=!0,l=r(e,p),w===global?v.runInThisContext(l):v.runInContext(l,w)}},e.register=function(){return require("./register")},require.extensions)for(m=this.FILE_EXTENSIONS,h=0,u=m.length;u>h;h++)s=m[h],null==(i=require.extensions)[s]&&(i[s]=function(){throw Error("Use CoffeeScript.register() or require the coffee-script/register module to require "+s+" files.")});e._compileFile=function(e,t){var n,i,s,o,c;null==t&&(t=!1),o=a.readFileSync(e,"utf8"),c=65279===o.charCodeAt(0)?o.substring(1):o;try{n=r(c,{filename:e,sourceMap:t,literate:l.isLiterate(e)})}catch(s){throw i=s,l.updateSyntaxError(i,c,e)}return n},p=new t,d.lexer={lex:function(){var e,t;return t=d.tokens[this.pos++],t?(e=t[0],this.yytext=t[1],this.yylloc=t[2],d.errorToken=t.origin||t,this.yylineno=this.yylloc.first_line):e="",e},setInput:function(e){return d.tokens=e,this.pos=0},upcomingInput:function(){return""}},d.yy=require("./nodes"),d.yy.parseError=function(e,t){var n,i,r,s,o,a;return o=t.token,s=d.errorToken,a=d.tokens,i=s[0],r=s[1],n=s[2],r=function(){switch(!1){case s!==a[a.length-1]:return"end of input";case"INDENT"!==i&&"OUTDENT"!==i:return"indentation";case"IDENTIFIER"!==i&&"NUMBER"!==i&&"STRING"!==i&&"STRING_START"!==i&&"REGEX"!==i&&"REGEX_START"!==i:return i.replace(/_START$/,"").toLowerCase();default:return 
l.nameWhitespaceCharacter(r)}}(),l.throwSyntaxError("unexpected "+r,n)},o=function(e,t){var n,i,r,s,o,a,c,l,h,u,p,d;return s=void 0,r="",e.isNative()?r="native":(e.isEval()?(s=e.getScriptNameOrSourceURL(),s||(r=e.getEvalOrigin()+", ")):s=e.getFileName(),s||(s=""),l=e.getLineNumber(),i=e.getColumnNumber(),u=t(s,l,i),r=u?s+":"+u[0]+":"+u[1]:s+":"+l+":"+i),o=e.getFunctionName(),a=e.isConstructor(),c=!(e.isToplevel()||a),c?(h=e.getMethodName(),d=e.getTypeName(),o?(p=n="",d&&o.indexOf(d)&&(p=d+"."),h&&o.indexOf("."+h)!==o.length-h.length-1&&(n=" [as "+h+"]"),""+p+o+n+" ("+r+")"):d+"."+(h||"")+" ("+r+")"):a?"new "+(o||"")+" ("+r+")":o?o+" ("+r+")":r},g={},c=function(t){var n,i;if(g[t])return g[t];if(i=null!=f?f.extname(t):void 0,!(0>k.call(e.FILE_EXTENSIONS,i)))return n=e._compileFile(t,!0),g[t]=n.sourceMap},Error.prepareStackTrace=function(t,n){var i,r,s;return s=function(e,t,n){var i,r;return r=c(e),r&&(i=r.sourceLocation([t-1,n-1])),i?[i[0]+1,i[1]+1]:null},r=function(){var t,r,a;for(a=[],t=0,r=n.length;r>t&&(i=n[t],i.getFunction()!==e.run);t++)a.push(" at "+o(i,s));return a}(),""+t+"\n"+r.join("\n")+"\n"}}.call(this),t.exports}(),require["./browser"]=function(){var exports={},module={exports:exports};return function(){var CoffeeScript,compile,runScripts,indexOf=[].indexOf||function(e){for(var t=0,n=this.length;n>t;t++)if(t in this&&this[t]===e)return t;return-1};CoffeeScript=require("./coffee-script"),CoffeeScript.require=require,compile=CoffeeScript.compile,CoffeeScript.eval=function(code,options){return null==options&&(options={}),null==options.bare&&(options.bare=!0),eval(compile(code,options))},CoffeeScript.run=function(e,t){return null==t&&(t={}),t.bare=!0,t.shiftLine=!0,Function(compile(e,t))()},"undefined"!=typeof window&&null!==window&&("undefined"!=typeof btoa&&null!==btoa&&"undefined"!=typeof JSON&&null!==JSON&&"undefined"!=typeof unescape&&null!==unescape&&"undefined"!=typeof encodeURIComponent&&null!==encodeURIComponent&&(compile=function(e,t){var 
n,i,r;return null==t&&(t={}),t.sourceMap=!0,t.inline=!0,i=CoffeeScript.compile(e,t),n=i.js,r=i.v3SourceMap,n+"\n//# sourceMappingURL=data:application/json;base64,"+btoa(unescape(encodeURIComponent(r)))+"\n//# sourceURL=coffeescript"}),CoffeeScript.load=function(e,t,n,i){var r;return null==n&&(n={}),null==i&&(i=!1),n.sourceFiles=[e],r=window.ActiveXObject?new window.ActiveXObject("Microsoft.XMLHTTP"):new window.XMLHttpRequest,r.open("GET",e,!0),"overrideMimeType"in r&&r.overrideMimeType("text/plain"),r.onreadystatechange=function(){var s,o;if(4===r.readyState){if(0!==(o=r.status)&&200!==o)throw Error("Could not load "+e);if(s=[r.responseText,n],i||CoffeeScript.run.apply(CoffeeScript,s),t)return t(s)}},r.send(null)},runScripts=function(){var e,t,n,i,r,s,o,a,c,l,h;for(h=window.document.getElementsByTagName("script"),t=["text/coffeescript","text/literate-coffeescript"],e=function(){var e,n,i,r;for(r=[],e=0,n=h.length;n>e;e++)c=h[e],i=c.type,indexOf.call(t,i)>=0&&r.push(c);return r}(),s=0,n=function(){var t;return t=e[s],t instanceof Array?(CoffeeScript.run.apply(CoffeeScript,t),s++,n()):void 0},i=function(i,r){var s,o;return s={literate:i.type===t[1]},o=i.src||i.getAttribute("data-src"),o?CoffeeScript.load(o,function(t){return e[r]=t,n()},s,!0):(s.sourceFiles=["embedded"],e[r]=[i.innerHTML,s])},r=o=0,a=e.length;a>o;r=++o)l=e[r],i(l,r);return n()},window.addEventListener?window.addEventListener("DOMContentLoaded",runScripts,!1):window.attachEvent("onload",runScripts))}.call(this),module.exports}(),require["./coffee-script"]}();"function"==typeof define&&define.amd?define(function(){return CoffeeScript}):root.CoffeeScript=CoffeeScript})(this); \ No newline at end of file diff --git a/tools/coffee/coffee.wsf b/tools/coffee/coffee.wsf index 25d590c9..c8909f7b 100644 --- a/tools/coffee/coffee.wsf +++ b/tools/coffee/coffee.wsf @@ -42,7 +42,7 @@ function convert(input, output) { } var coffee; - if (!input) { + if (!input) { // Read all input data from STDIN var chunks = []; 
while (!WScript.StdIn.AtEndOfStream) @@ -62,7 +62,8 @@ function convert(input, output) { return new f; } - var js = CoffeeScript.compile(coffee, {filename: "temp.coffee"}); + var js = CoffeeScript.compile(coffee); + if (!output) { WScript.StdOut.Write(js); } diff --git a/trackers.txt b/trackers.txt deleted file mode 100644 index a42f8ca4..00000000 --- a/trackers.txt +++ /dev/null @@ -1,142 +0,0 @@ -udp://tracker.opentrackr.org:1337/announce -udp://explodie.org:6969/announce -udp://open.stealth.si:80/announce -http://tracker.ipv6tracker.ru:80/announce -udp://tracker.birkenwald.de:6969/announce -udp://tracker.moeking.me:6969/announce -http://tracker.bt4g.com:2095/announce -https://tracker.nanoha.org:443/announce -http://tracker.files.fm:6969/announce -http://open.acgnxtracker.com:80/announce -udp://tracker.army:6969/announce -udp://fe.dealclub.de:6969/announce -udp://tracker.leech.ie:1337/announce -udp://tracker.altrosky.nl:6969/announce -https://tracker.cyber-hub.net:443/announce -https://tracker.lilithraws.cf:443/announce -http://bt.okmp3.ru:2710/announce -udp://vibe.sleepyinternetfun.xyz:1738/announce -udp://open.publictracker.xyz:6969/announce -udp://tracker.bitsearch.to:1337/announce -udp://tracker.pomf.se:80/announce -https://tr.burnabyhighstar.com:443/announce -https://tr.abiir.top:443/announce -udp://open.free-tracker.ga:6969/announce -http://i-p-v-6.tk:6969/announce -http://open-v6.demonoid.ch:6969/announce -udp://aarsen.me:6969/announce -udp://htz3.noho.st:6969/announce -udp://uploads.gamecoast.net:6969/announce -udp://mail.zasaonsk.ga:6969/announce -udp://tracker.joybomb.tw:6969/announce -udp://tracker.jonaslsa.com:6969/announce -udp://leefafa.tk:6969/announce -udp://carr.codes:6969/announce -https://tr.fuckbitcoin.xyz:443/announce -udp://tracker.cubonegro.xyz:6969/announce -udp://tracker.skynetcloud.site:6969/announce -http://tracker4.itzmx.com:2710/announce -https://tracker.lilithraws.org:443/announce -udp://tracker.novaopcj.eu.org:6969/announce 
-udp://exodus.desync.com:6969/announce -http://t.acg.rip:6699/announce -udp://tracker2.dler.com:80/announce -udp://6ahddutb1ucc3cp.ru:6969/announce -udp://tracker.blacksparrowmedia.net:6969/announce -http://fxtt.ru:80/announce -udp://tracker.auctor.tv:6969/announce -udp://torrentclub.space:6969/announce -udp://zecircle.xyz:6969/announce -udp://psyco.fr:6969/announce -udp://fh2.cmp-gaming.com:6969/announce -udp://new-line.net:6969/announce -udp://torrents.artixlinux.org:6969/announce -udp://bt.ktrackers.com:6666/announce -udp://static.54.161.216.95.clients.your-server.de:6969/announce -udp://cpe-104-34-3-152.socal.res.rr.com:6969/announce -http://t.overflow.biz:6969/announce -udp://tracker1.myporn.club:9337/announce -udp://moonburrow.club:6969/announce -udp://tracker.artixlinux.org:6969/announce -https://t1.hloli.org:443/announce -udp://bt1.archive.org:6969/announce -udp://tracker.theoks.net:6969/announce -udp://tracker.4.babico.name.tr:3131/announce -udp://buddyfly.top:6969/announce -udp://ipv6.tracker.harry.lu:80/announce -udp://public.publictracker.xyz:6969/announce -udp://mail.artixlinux.org:6969/announce -udp://v1046920.hosted-by-vdsina.ru:6969/announce -udp://tracker.cyberia.is:6969/announce -udp://tracker.beeimg.com:6969/announce -udp://creative.7o7.cx:6969/announce -udp://open.dstud.io:6969/announce -udp://laze.cc:6969/announce -udp://download.nerocloud.me:6969/announce -udp://cutscloud.duckdns.org:6969/announce -https://tracker.jiesen.life:8443/announce -udp://jutone.com:6969/announce -udp://wepzone.net:6969/announce -udp://ipv4.tracker.harry.lu:80/announce -udp://tracker.tcp.exchange:6969/announce -udp://f1sh.de:6969/announce -udp://movies.zsw.ca:6969/announce -https://tracker1.ctix.cn:443/announce -udp://sanincode.com:6969/announce -udp://www.torrent.eu.org:451/announce -udp://open.4ever.tk:6969/announce -https://tracker2.ctix.cn:443/announce -udp://bt2.archive.org:6969/announce -http://t.nyaatracker.com:80/announce -udp://yahor.ftp.sh:6969/announce 
-udp://tracker.openbtba.com:6969/announce -udp://tracker.dler.com:6969/announce -udp://tracker-udp.gbitt.info:80/announce -udp://tracker.srv00.com:6969/announce -udp://tracker.pimpmyworld.to:6969/announce -http://tracker.gbitt.info:80/announce -udp://tracker6.lelux.fi:6969/announce -http://tracker.vrpnet.org:6969/announce -http://00.xxtor.com:443/announce -http://vps02.net.orel.ru:80/announce -udp://tracker.yangxiaoguozi.cn:6969/announce -udp://rep-art.ynh.fr:6969/announce -https://tracker.imgoingto.icu:443/announce -udp://mirror.aptus.co.tz:6969/announce -udp://tracker.lelux.fi:6969/announce -udp://tracker.torrent.eu.org:451/announce -udp://admin.52ywp.com:6969/announce -udp://thouvenin.cloud:6969/announce -http://vps-dd0a0715.vps.ovh.net:6969/announce -udp://bubu.mapfactor.com:6969/announce -udp://94-227-232-84.access.telenet.be:6969/announce -udp://epider.me:6969/announce -udp://camera.lei001.com:6969/announce -udp://tamas3.ynh.fr:6969/announce -https://tracker.tamersunion.org:443/announce -udp://ftp.pet:2710/announce -udp://p4p.arenabg.com:1337/announce -http://tracker.mywaifu.best:6969/announce -udp://tracker.monitorit4.me:6969/announce -udp://ipv6.tracker.monitorit4.me:6969/announce -zero://k5w77dozo3hy5zualyhni6vrh73iwfkaofa64abbilwyhhd3wgenbjqd.onion:15441 -zero://2kcb2fqesyaevc4lntogupa4mkdssth2ypfwczd2ov5a3zo6ytwwbayd.onion:15441 -zero://5vczpwawviukvd7grfhsfxp7a6huz77hlis4fstjkym5kmf4pu7i7myd.onion:15441 -zero://pn4q2zzt2pw4nk7yidxvsxmydko7dfibuzxdswi6gu6ninjpofvqs2id.onion:15441 -zero://6i54dd5th73oelv636ivix6sjnwfgk2qsltnyvswagwphub375t3xcad.onion:15441 -zero://tl74auz4tyqv4bieeclmyoe4uwtoc2dj7fdqv4nc4gl5j2bwg2r26bqd.onion:15441 -zero://wlxav3szbrdhest4j7dib2vgbrd7uj7u7rnuzg22cxbih7yxyg2hsmid.onion:15441 -zero://zy7wttvjtsijt5uwmlar4yguvjc2gppzbdj4v6bujng6xwjmkdg7uvqd.onion:15441 -zero://rlcjomszyitxpwv7kzopmqgzk3bdpsxeull4c3s6goszkk6h2sotfoad.onion:15441 -zero://gugt43coc5tkyrhrc3esf6t6aeycvcqzw7qafxrjpqbwt4ssz5czgzyd.onion:15441 
-zero://ow7in4ftwsix5klcbdfqvfqjvimqshbm2o75rhtpdnsderrcbx74wbad.onion:15441 -zero://57hzgtu62yzxqgbvgxs7g3lfck3za4zrda7qkskar3tlak5recxcebyd.onion:15445 -zero://hb6ozikfiaafeuqvgseiik4r46szbpjfu66l67wjinnyv6dtopuwhtqd.onion:15445 -zero://qn65si4gtcwdiliq7vzrwu62qrweoxb6tx2cchwslaervj6szuje66qd.onion:26117 -zero://s3j2s5pjdfesbsmaqx6alsumaxxdxibmhv4eukmqpv3vqj6f627qx5yd.onion:15441 -zero://agufghdtniyfwty3wk55drxxwj2zxgzzo7dbrtje73gmvcpxy4ngs4ad.onion:15441 -zero://kgsvasoakvj4gnjiy7zemu34l3hq46dn5eauqkn76jpowmilci5t2vqd.onion:15445 -zero://dslesoe72bdfwfu4cfqa2wpd4hr3fhlu4zv6mfsjju5xlpmssouv36qd.onion:15441 -zero://f2hnjbggc3c2u2apvxdugirnk6bral54ibdoul3hhvu7pd4fso5fq3yd.onion:15441 -zero://skdeywpgm5xncpxbbr4cuiip6ey4dkambpanog6nruvmef4f3e7o47qd.onion:15441 -zero://tqmo2nffqo4qc5jgmz3me5eri3zpgf3v2zciufzmhnvznjve5c3argad.onion:15441 \ No newline at end of file diff --git a/update.py b/update.py index cf9898f9..3830dd37 100644 --- a/update.py +++ b/update.py @@ -1,120 +1,81 @@ +import urllib +import zipfile import os -import sys -import json +import ssl +import httplib +import socket import re -import shutil +import cStringIO as StringIO + +from gevent import monkey +monkey.patch_all() def update(): - from Config import config - config.parse(silent=True) + # Gevent https bug workaround (https://github.com/gevent/gevent/issues/477) + reload(socket) + reload(httplib) + reload(ssl) - if getattr(sys, 'source_update_dir', False): - if not os.path.isdir(sys.source_update_dir): - os.makedirs(sys.source_update_dir) - source_path = sys.source_update_dir.rstrip("/") - else: - source_path = os.getcwd().rstrip("/") - - if config.dist_type.startswith("bundle_linux"): - runtime_path = os.path.normpath(os.path.dirname(sys.executable) + "/../..") - else: - runtime_path = os.path.dirname(sys.executable) - - updatesite_path = config.data_dir + "/" + config.updatesite - - sites_json = json.load(open(config.data_dir + "/sites.json")) - updatesite_bad_files = 
sites_json.get(config.updatesite, {}).get("cache", {}).get("bad_files", {}) - print( - "Update site path: %s, bad_files: %s, source path: %s, runtime path: %s, dist type: %s" % - (updatesite_path, len(updatesite_bad_files), source_path, runtime_path, config.dist_type) - ) - - updatesite_content_json = json.load(open(updatesite_path + "/content.json")) - inner_paths = list(updatesite_content_json.get("files", {}).keys()) - inner_paths += list(updatesite_content_json.get("files_optional", {}).keys()) - - # Keep file only in ZeroNet directory - inner_paths = [inner_path for inner_path in inner_paths if re.match("^(core|bundle)", inner_path)] + print "Downloading.", + file = urllib.urlopen("https://github.com/HelloZeroNet/ZeroNet/archive/master.zip") + data = StringIO.StringIO() + while True: + buff = file.read(1024 * 16) + if not buff: + break + data.write(buff) + print ".", + print "Downloaded." # Checking plugins plugins_enabled = [] plugins_disabled = [] - if os.path.isdir("%s/plugins" % source_path): - for dir in os.listdir("%s/plugins" % source_path): + if os.path.isdir("plugins"): + for dir in os.listdir("plugins"): if dir.startswith("disabled-"): plugins_disabled.append(dir.replace("disabled-", "")) else: plugins_enabled.append(dir) - print("Plugins enabled:", plugins_enabled, "disabled:", plugins_disabled) - - update_paths = {} - - for inner_path in inner_paths: - if ".." 
in inner_path: - continue - inner_path = inner_path.replace("\\", "/").strip("/") # Make sure we have unix path - print(".", end=" ") - if inner_path.startswith("core"): - dest_path = source_path + "/" + re.sub("^core/", "", inner_path) - elif inner_path.startswith(config.dist_type): - dest_path = runtime_path + "/" + re.sub("^bundle[^/]+/", "", inner_path) - else: - continue + print "Plugins enabled:", plugins_enabled, "disabled:", plugins_disabled + print "Extracting...", + zip = zipfile.ZipFile(data) + for inner_path in zip.namelist(): + inner_path = inner_path.replace("\\", "/") # Make sure we have unix path + print ".", + dest_path = inner_path.replace("ZeroNet-master/", "") if not dest_path: continue # Keep plugin disabled/enabled status - match = re.match(re.escape(source_path) + "/plugins/([^/]+)", dest_path) + match = re.match("plugins/([^/]+)", dest_path) if match: plugin_name = match.group(1).replace("disabled-", "") if plugin_name in plugins_enabled: # Plugin was enabled dest_path = dest_path.replace("plugins/disabled-" + plugin_name, "plugins/" + plugin_name) elif plugin_name in plugins_disabled: # Plugin was disabled dest_path = dest_path.replace("plugins/" + plugin_name, "plugins/disabled-" + plugin_name) - print("P", end=" ") + print "P", dest_dir = os.path.dirname(dest_path) + if dest_dir and not os.path.isdir(dest_dir): os.makedirs(dest_dir) if dest_dir != dest_path.strip("/"): - update_paths[updatesite_path + "/" + inner_path] = dest_path - - num_ok = 0 - num_rename = 0 - num_error = 0 - for path_from, path_to in update_paths.items(): - print("-", path_from, "->", path_to) - if not os.path.isfile(path_from): - print("Missing file") - continue - - data = open(path_from, "rb").read() - - try: - open(path_to, 'wb').write(data) - num_ok += 1 - except Exception as err: + data = zip.read(inner_path) try: - print("Error writing: %s. Renaming old file as workaround..." 
% err) - path_to_tmp = path_to + "-old" - if os.path.isfile(path_to_tmp): - os.unlink(path_to_tmp) - os.rename(path_to, path_to_tmp) - num_rename += 1 - open(path_to, 'wb').write(data) - shutil.copymode(path_to_tmp, path_to) # Copy permissions - print("Write done after rename!") - num_ok += 1 - except Exception as err: - print("Write error after rename: %s" % err) - num_error += 1 - print("* Updated files: %s, renamed: %s, error: %s" % (num_ok, num_rename, num_error)) + open(dest_path, 'wb').write(data) + except Exception, err: + print dest_path, err + + print "Done." if __name__ == "__main__": - sys.path.insert(0, os.path.join(os.path.dirname(__file__), "src")) # Imports relative to src - - update() + try: + update() + except Exception, err: + print "Update error: %s" % err + raw_input("Press enter to exit") diff --git a/zeronet.py b/zeronet.py old mode 100755 new mode 100644 index 457efb19..959699c8 --- a/zeronet.py +++ b/zeronet.py @@ -1,138 +1,59 @@ -#!/usr/bin/env python3 +#!/usr/bin/env python + +# Included modules import os import sys def main(): - if sys.version_info.major < 3: - print("Error: Python 3.x is required") - sys.exit(0) - - if "--silent" not in sys.argv: - print("- Starting ZeroNet...") + print "- Starting ZeroNet..." 
main = None try: + sys.path.insert(0, os.path.join(os.path.dirname(__file__), "src")) # Imports relative to src import main main.start() - except Exception as err: # Prevent closing - import traceback - try: - import logging - logging.exception("Unhandled exception: %s" % err) - except Exception as log_err: - print("Failed to log error:", log_err) - traceback.print_exc() - from Config import config - error_log_path = config.log_dir + "/error.log" - traceback.print_exc(file=open(error_log_path, "w")) - print("---") - print("Please report it: https://github.com/HelloZeroNet/ZeroNet/issues/new?assignees=&labels=&template=bug-report.md") - if sys.platform.startswith("win") and "python.exe" not in sys.executable: - displayErrorMessage(err, error_log_path) - - if main and (main.update_after_shutdown or main.restart_after_shutdown): # Updater - if main.update_after_shutdown: - print("Shutting down...") - prepareShutdown() + if main.update_after_shutdown: # Updater + import gc import update - print("Updating...") + # Try cleanup openssl + try: + if "lib.opensslVerify" in sys.modules: + sys.modules["lib.opensslVerify"].opensslVerify.closeLibrary() + except Exception, err: + print "Error closing opensslVerify lib", err + try: + if "lib.pyelliptic" in sys.modules: + sys.modules["lib.pyelliptic"].openssl.closeLibrary() + except Exception, err: + print "Error closing pyelliptic lib", err + + # Update update.update() - if main.restart_after_shutdown: - print("Restarting...") - restart() - else: - print("Shutting down...") - prepareShutdown() - print("Restarting...") - restart() + # Close log files + logger = sys.modules["main"].logging.getLogger() -def displayErrorMessage(err, error_log_path): - import ctypes - import urllib.parse - import subprocess + for handler in logger.handlers[:]: + handler.flush() + handler.close() + logger.removeHandler(handler) - MB_YESNOCANCEL = 0x3 - MB_ICONEXCLAIMATION = 0x30 + except (Exception, ): # Prevent closing + import traceback + 
traceback.print_exc() + traceback.print_exc(file=open("log/error.log", "a")) - ID_YES = 0x6 - ID_NO = 0x7 - ID_CANCEL = 0x2 - - err_message = "%s: %s" % (type(err).__name__, err) - err_title = "Unhandled exception: %s\nReport error?" % err_message - - res = ctypes.windll.user32.MessageBoxW(0, err_title, "ZeroNet error", MB_YESNOCANCEL | MB_ICONEXCLAIMATION) - if res == ID_YES: - import webbrowser - report_url = "https://github.com/ZeroNetX/ZeroNet/issues/new?assignees=&labels=&template=bug-report.md&title=%s" - webbrowser.open(report_url % urllib.parse.quote("Unhandled exception: %s" % err_message)) - if res in [ID_YES, ID_NO]: - subprocess.Popen(['notepad.exe', error_log_path]) - -def prepareShutdown(): - import atexit - atexit._run_exitfuncs() - - # Close log files - if "main" in sys.modules: - logger = sys.modules["main"].logging.getLogger() - - for handler in logger.handlers[:]: - handler.flush() - handler.close() - logger.removeHandler(handler) - - import time - time.sleep(1) # Wait for files to close - -def restart(): - args = sys.argv[:] - - sys.executable = sys.executable.replace(".pkg", "") # Frozen mac fix - - if not getattr(sys, 'frozen', False): + if main and main.update_after_shutdown: # Updater + # Restart + gc.collect() # Garbage collect + print "Restarting..." 
+ args = sys.argv[:] args.insert(0, sys.executable) - - # Don't open browser after restart - if "--open_browser" in args: - del args[args.index("--open_browser") + 1] # argument value - del args[args.index("--open_browser")] # argument key - - if getattr(sys, 'frozen', False): - pos_first_arg = 1 # Only the executable - else: - pos_first_arg = 2 # Interpter, .py file path - - args.insert(pos_first_arg, "--open_browser") - args.insert(pos_first_arg + 1, "False") - - if sys.platform == 'win32': - args = ['"%s"' % arg for arg in args] - - try: - print("Executing %s %s" % (sys.executable, args)) + if sys.platform == 'win32': + args = ['"%s"' % arg for arg in args] os.execv(sys.executable, args) - except Exception as err: - print("Execv error: %s" % err) - print("Bye.") - - -def start(): - app_dir = os.path.dirname(os.path.abspath(__file__)) - os.chdir(app_dir) # Change working dir to zeronet.py dir - sys.path.insert(0, os.path.join(app_dir, "src/lib")) # External liblary directory - sys.path.insert(0, os.path.join(app_dir, "src")) # Imports relative to src - - if "--update" in sys.argv: - sys.argv.remove("--update") - print("Updating...") - import update - update.update() - else: - main() - + print "Bye." if __name__ == '__main__': - start() + main()