diff --git a/plugins/Benchmark/BenchmarkDb.py b/plugins/Benchmark/BenchmarkDb.py
new file mode 100644
index 00000000..6aa3a028
--- /dev/null
+++ b/plugins/Benchmark/BenchmarkDb.py
@@ -0,0 +1,141 @@
+import os
+import json
+import contextlib
+import time
+
+from Plugin import PluginManager
+from Config import config
+
+
+@PluginManager.registerTo("Actions")
+class ActionsPlugin:
+ def getBenchmarkTests(self, online=False):
+ tests = super().getBenchmarkTests(online)
+ tests.extend([
+ {"func": self.testDbConnect, "num": 10, "time_standard": 0.27},
+ {"func": self.testDbInsert, "num": 10, "time_standard": 0.91},
+ {"func": self.testDbInsertMultiuser, "num": 1, "time_standard": 0.57},
+ {"func": self.testDbQueryIndexed, "num": 1000, "time_standard": 0.84},
+ {"func": self.testDbQueryNotIndexed, "num": 1000, "time_standard": 1.30}
+ ])
+ return tests
+
+
+ @contextlib.contextmanager
+ def getTestDb(self):
+ from Db import Db
+ path = "%s/benchmark.db" % config.data_dir
+ if os.path.isfile(path):
+ os.unlink(path)
+ schema = {
+ "db_name": "TestDb",
+ "db_file": path,
+ "maps": {
+ ".*": {
+ "to_table": {
+ "test": "test"
+ }
+ }
+ },
+ "tables": {
+ "test": {
+ "cols": [
+ ["test_id", "INTEGER"],
+ ["title", "TEXT"],
+ ["json_id", "INTEGER REFERENCES json (json_id)"]
+ ],
+ "indexes": ["CREATE UNIQUE INDEX test_key ON test(test_id, json_id)"],
+ "schema_changed": 1426195822
+ }
+ }
+ }
+
+ db = Db.Db(schema, path)
+
+ yield db
+
+ db.close()
+ if os.path.isfile(path):
+ os.unlink(path)
+
+ def testDbConnect(self, num_run=1):
+ import sqlite3
+ for i in range(num_run):
+ with self.getTestDb() as db:
+ db.checkTables()
+ yield "."
+ yield "(SQLite version: %s, API: %s)" % (sqlite3.sqlite_version, sqlite3.version)
+
+ def testDbInsert(self, num_run=1):
+ yield "x 1000 lines "
+ for u in range(num_run):
+ with self.getTestDb() as db:
+ db.checkTables()
+ data = {"test": []}
+ for i in range(1000): # 1000 line of data
+ data["test"].append({"test_id": i, "title": "Testdata for %s message %s" % (u, i)})
+ json.dump(data, open("%s/test_%s.json" % (config.data_dir, u), "w"))
+ db.updateJson("%s/test_%s.json" % (config.data_dir, u))
+ os.unlink("%s/test_%s.json" % (config.data_dir, u))
+ assert db.execute("SELECT COUNT(*) FROM test").fetchone()[0] == 1000
+ yield "."
+
+ def fillTestDb(self, db):
+ db.checkTables()
+ cur = db.getCursor()
+ cur.logging = False
+ for u in range(100, 200): # 100 user
+ data = {"test": []}
+ for i in range(100): # 1000 line of data
+ data["test"].append({"test_id": i, "title": "Testdata for %s message %s" % (u, i)})
+ json.dump(data, open("%s/test_%s.json" % (config.data_dir, u), "w"))
+ db.updateJson("%s/test_%s.json" % (config.data_dir, u), cur=cur)
+ os.unlink("%s/test_%s.json" % (config.data_dir, u))
+ if u % 10 == 0:
+ yield "."
+
+ def testDbInsertMultiuser(self, num_run=1):
+ yield "x 100 users x 100 lines "
+ for u in range(num_run):
+ with self.getTestDb() as db:
+ for progress in self.fillTestDb(db):
+ yield progress
+ num_rows = db.execute("SELECT COUNT(*) FROM test").fetchone()[0]
+ assert num_rows == 10000, "%s != 10000" % num_rows
+
+ def testDbQueryIndexed(self, num_run=1):
+ s = time.time()
+ with self.getTestDb() as db:
+ for progress in self.fillTestDb(db):
+ pass
+ yield " (Db warmup done in %.3fs) " % (time.time() - s)
+ found_total = 0
+ for i in range(num_run): # 1000x by test_id
+ found = 0
+ res = db.execute("SELECT * FROM test WHERE test_id = %s" % (i % 100))
+ for row in res:
+ found_total += 1
+ found += 1
+ yield "."
+ assert found == 100, "%s != 100 (i: %s)" % (found, i)
+ yield "Found: %s" % found_total
+
+ def testDbQueryNotIndexed(self, num_run=1):
+ s = time.time()
+ with self.getTestDb() as db:
+ for progress in self.fillTestDb(db):
+ pass
+ yield " (Db warmup done in %.3fs) " % (time.time() - s)
+ found_total = 0
+ for i in range(num_run): # 1000x by test_id
+ found = 0
+ res = db.execute("SELECT * FROM test WHERE json_id = %s" % i)
+ for row in res:
+ found_total += 1
+ found += 1
+ yield "."
+ if i == 0 or i > 100:
+ assert found == 0, "%s != 0 (i: %s)" % (found, i)
+ else:
+ assert found == 100, "%s != 100 (i: %s)" % (found, i)
+ yield "Found: %s" % found_total
diff --git a/plugins/Benchmark/BenchmarkPack.py b/plugins/Benchmark/BenchmarkPack.py
new file mode 100644
index 00000000..6b92e43a
--- /dev/null
+++ b/plugins/Benchmark/BenchmarkPack.py
@@ -0,0 +1,183 @@
+import os
+import io
+from collections import OrderedDict
+
+from Plugin import PluginManager
+from Config import config
+from util import Msgpack
+
+
+@PluginManager.registerTo("Actions")
+class ActionsPlugin:
+ def createZipFile(self, path):
+ import zipfile
+ test_data = b"Test" * 1024
+ file_name = b"\xc3\x81rv\xc3\xadzt\xc5\xb1r\xc5\x91%s.txt".decode("utf8")
+ with zipfile.ZipFile(path, 'w') as archive:
+ for y in range(100):
+ zip_info = zipfile.ZipInfo(file_name % y, (1980, 1, 1, 0, 0, 0))
+ zip_info.compress_type = zipfile.ZIP_DEFLATED
+ zip_info.create_system = 3
+ zip_info.flag_bits = 0
+ zip_info.external_attr = 25165824
+ archive.writestr(zip_info, test_data)
+
+ def testPackZip(self, num_run=1):
+ """
+ Test zip file creating
+ """
+ yield "x 100 x 5KB "
+ from Crypt import CryptHash
+ zip_path = '%s/test.zip' % config.data_dir
+ for i in range(num_run):
+ self.createZipFile(zip_path)
+ yield "."
+
+ archive_size = os.path.getsize(zip_path) / 1024
+ yield "(Generated file size: %.2fkB)" % archive_size
+
+ hash = CryptHash.sha512sum(open(zip_path, "rb"))
+ valid = "cb32fb43783a1c06a2170a6bc5bb228a032b67ff7a1fd7a5efb9b467b400f553"
+        assert hash == valid, "Invalid hash: %s != %s\n" % (hash, valid)
+ os.unlink(zip_path)
+
+ def testUnpackZip(self, num_run=1):
+ """
+ Test zip file reading
+ """
+ yield "x 100 x 5KB "
+ import zipfile
+ zip_path = '%s/test.zip' % config.data_dir
+ test_data = b"Test" * 1024
+ file_name = b"\xc3\x81rv\xc3\xadzt\xc5\xb1r\xc5\x91".decode("utf8")
+
+ self.createZipFile(zip_path)
+ for i in range(num_run):
+ with zipfile.ZipFile(zip_path) as archive:
+ for f in archive.filelist:
+ assert f.filename.startswith(file_name), "Invalid filename: %s != %s" % (f.filename, file_name)
+ data = archive.open(f.filename).read()
+ assert archive.open(f.filename).read() == test_data, "Invalid data: %s..." % data[0:30]
+ yield "."
+
+ os.unlink(zip_path)
+
+ def createArchiveFile(self, path, archive_type="gz"):
+ import tarfile
+ import gzip
+
+ # Monkey patch _init_write_gz to use fixed date in order to keep the hash independent from datetime
+ def nodate_write_gzip_header(self):
+ self._write_mtime = 0
+ original_write_gzip_header(self)
+
+ test_data_io = io.BytesIO(b"Test" * 1024)
+ file_name = b"\xc3\x81rv\xc3\xadzt\xc5\xb1r\xc5\x91%s.txt".decode("utf8")
+
+ original_write_gzip_header = gzip.GzipFile._write_gzip_header
+ gzip.GzipFile._write_gzip_header = nodate_write_gzip_header
+ with tarfile.open(path, 'w:%s' % archive_type) as archive:
+ for y in range(100):
+ test_data_io.seek(0)
+ tar_info = tarfile.TarInfo(file_name % y)
+ tar_info.size = 4 * 1024
+ archive.addfile(tar_info, test_data_io)
+
+ def testPackArchive(self, num_run=1, archive_type="gz"):
+ """
+ Test creating tar archive files
+ """
+ yield "x 100 x 5KB "
+ from Crypt import CryptHash
+
+ hash_valid_db = {
+ "gz": "92caec5121a31709cbbc8c11b0939758e670b055bbbe84f9beb3e781dfde710f",
+ "bz2": "b613f41e6ee947c8b9b589d3e8fa66f3e28f63be23f4faf015e2f01b5c0b032d",
+ "xz": "ae43892581d770959c8d993daffab25fd74490b7cf9fafc7aaee746f69895bcb",
+ }
+ archive_path = '%s/test.tar.%s' % (config.data_dir, archive_type)
+ for i in range(num_run):
+ self.createArchiveFile(archive_path, archive_type=archive_type)
+ yield "."
+
+ archive_size = os.path.getsize(archive_path) / 1024
+ yield "(Generated file size: %.2fkB)" % archive_size
+
+ hash = CryptHash.sha512sum(open("%s/test.tar.%s" % (config.data_dir, archive_type), "rb"))
+ valid = hash_valid_db[archive_type]
+        assert hash == valid, "Invalid hash: %s != %s\n" % (hash, valid)
+
+ if os.path.isfile(archive_path):
+ os.unlink(archive_path)
+
+ def testUnpackArchive(self, num_run=1, archive_type="gz"):
+ """
+ Test reading tar archive files
+ """
+ yield "x 100 x 5KB "
+ import tarfile
+
+ test_data = b"Test" * 1024
+ file_name = b"\xc3\x81rv\xc3\xadzt\xc5\xb1r\xc5\x91%s.txt".decode("utf8")
+ archive_path = '%s/test.tar.%s' % (config.data_dir, archive_type)
+ self.createArchiveFile(archive_path, archive_type=archive_type)
+ for i in range(num_run):
+ with tarfile.open(archive_path, 'r:%s' % archive_type) as archive:
+ for y in range(100):
+ assert archive.extractfile(file_name % y).read() == test_data
+ yield "."
+ if os.path.isfile(archive_path):
+ os.unlink(archive_path)
+
+ def testPackMsgpack(self, num_run=1):
+ """
+ Test msgpack encoding
+ """
+ yield "x 100 x 5KB "
+ binary = b'fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv'
+ data = OrderedDict(
+ sorted({"int": 1024 * 1024 * 1024, "float": 12345.67890, "text": "hello" * 1024, "binary": binary}.items())
+ )
+ data_packed_valid = b'\x84\xa6binary\xc5\x01\x00fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv\xa5float\xcb@\xc8\x1c\xd6\xe61\xf8\xa1\xa3int\xce@\x00\x00\x00\xa4text\xda\x14\x00'
+ data_packed_valid += b'hello' * 1024
+ for y in range(num_run):
+ for i in range(100):
+ data_packed = Msgpack.pack(data)
+ yield "."
+            assert data_packed == data_packed_valid, "%s\n!=\n%s" % (repr(data_packed), repr(data_packed_valid))
+
+ def testUnpackMsgpack(self, num_run=1):
+ """
+ Test msgpack decoding
+ """
+ yield "x 5KB "
+ binary = b'fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv'
+ data = OrderedDict(
+ sorted({"int": 1024 * 1024 * 1024, "float": 12345.67890, "text": "hello" * 1024, "binary": binary}.items())
+ )
+ data_packed = b'\x84\xa6binary\xc5\x01\x00fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv\xa5float\xcb@\xc8\x1c\xd6\xe61\xf8\xa1\xa3int\xce@\x00\x00\x00\xa4text\xda\x14\x00'
+ data_packed += b'hello' * 1024
+ for y in range(num_run):
+ data_unpacked = Msgpack.unpack(data_packed, decode=False)
+ yield "."
+        assert data_unpacked == data, "%s\n!=\n%s" % (data_unpacked, data)
+
+ def testUnpackMsgpackStreaming(self, num_run=1, fallback=False):
+ """
+ Test streaming msgpack decoding
+ """
+ yield "x 1000 x 5KB "
+ binary = b'fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv'
+ data = OrderedDict(
+ sorted({"int": 1024 * 1024 * 1024, "float": 12345.67890, "text": "hello" * 1024, "binary": binary}.items())
+ )
+ data_packed = b'\x84\xa6binary\xc5\x01\x00fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv\xa5float\xcb@\xc8\x1c\xd6\xe61\xf8\xa1\xa3int\xce@\x00\x00\x00\xa4text\xda\x14\x00'
+ data_packed += b'hello' * 1024
+ for i in range(num_run):
+ unpacker = Msgpack.getUnpacker(decode=False, fallback=fallback)
+ for y in range(1000):
+ unpacker.feed(data_packed)
+ for data_unpacked in unpacker:
+ pass
+ yield "."
+ assert data == data_unpacked, "%s != %s" % (data_unpacked, data)
diff --git a/plugins/Benchmark/BenchmarkPlugin.py b/plugins/Benchmark/BenchmarkPlugin.py
new file mode 100644
index 00000000..eb3c31c0
--- /dev/null
+++ b/plugins/Benchmark/BenchmarkPlugin.py
@@ -0,0 +1,350 @@
+import os
+import time
+import io
+import math
+import hashlib
+import re
+
+from Config import config
+from Crypt import CryptHash
+from Plugin import PluginManager
+from Debug import Debug
+from util import helper
+
+plugin_dir = os.path.dirname(__file__)
+
+benchmark_key = None
+
+
+@PluginManager.registerTo("UiRequest")
+class UiRequestPlugin(object):
+ @helper.encodeResponse
+ def actionBenchmark(self):
+ global benchmark_key
+ script_nonce = self.getScriptNonce()
+ if not benchmark_key:
+ benchmark_key = CryptHash.random(encoding="base64")
+ self.sendHeader(script_nonce=script_nonce)
+
+ if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local:
+ yield "This function is disabled on this proxy"
+ return
+
+ data = self.render(
+ plugin_dir + "/media/benchmark.html",
+ script_nonce=script_nonce,
+ benchmark_key=benchmark_key,
+ filter=re.sub("[^A-Za-z0-9]", "", self.get.get("filter", ""))
+ )
+ yield data
+
+ @helper.encodeResponse
+ def actionBenchmarkResult(self):
+ global benchmark_key
+ if self.get.get("benchmark_key", "") != benchmark_key:
+ return self.error403("Invalid benchmark key")
+
+ self.sendHeader(content_type="text/plain", noscript=True)
+
+ if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local:
+ yield "This function is disabled on this proxy"
+ return
+
+ yield " " * 1024 # Head (required for streaming)
+
+ import main
+ s = time.time()
+
+ for part in main.actions.testBenchmark(filter=self.get.get("filter", "")):
+ yield part
+
+ yield "\n - Total time: %.3fs" % (time.time() - s)
+
+
+@PluginManager.registerTo("Actions")
+class ActionsPlugin:
+ def getMultiplerTitle(self, multipler):
+ if multipler < 0.3:
+ multipler_title = "Sloooow"
+ elif multipler < 0.6:
+ multipler_title = "Ehh"
+ elif multipler < 0.8:
+ multipler_title = "Goodish"
+ elif multipler < 1.2:
+ multipler_title = "OK"
+ elif multipler < 1.7:
+ multipler_title = "Fine"
+ elif multipler < 2.5:
+ multipler_title = "Fast"
+ elif multipler < 3.5:
+ multipler_title = "WOW"
+ else:
+ multipler_title = "Insane!!"
+ return multipler_title
+
+ def formatResult(self, taken, standard):
+ if not standard:
+ return " Done in %.3fs" % taken
+
+ if taken > 0:
+ multipler = standard / taken
+ else:
+ multipler = 99
+ multipler_title = self.getMultiplerTitle(multipler)
+
+ return " Done in %.3fs = %s (%.2fx)" % (taken, multipler_title, multipler)
+
+ def getBenchmarkTests(self, online=False):
+ tests = [
+ {"func": self.testHdPrivatekey, "num": 50, "time_standard": 0.57},
+ {"func": self.testSign, "num": 20, "time_standard": 0.46},
+ {"func": self.testVerify, "kwargs": {"lib_verify": "btctools"}, "num": 20, "time_standard": 0.38},
+ {"func": self.testVerify, "kwargs": {"lib_verify": "openssl"}, "num": 200, "time_standard": 0.30},
+ {"func": self.testVerify, "kwargs": {"lib_verify": "libsecp256k1"}, "num": 200, "time_standard": 0.10},
+
+ {"func": self.testPackMsgpack, "num": 100, "time_standard": 0.35},
+ {"func": self.testUnpackMsgpackStreaming, "kwargs": {"fallback": False}, "num": 100, "time_standard": 0.35},
+ {"func": self.testUnpackMsgpackStreaming, "kwargs": {"fallback": True}, "num": 10, "time_standard": 0.5},
+
+ {"func": self.testPackZip, "num": 5, "time_standard": 0.065},
+ {"func": self.testPackArchive, "kwargs": {"archive_type": "gz"}, "num": 5, "time_standard": 0.08},
+ {"func": self.testPackArchive, "kwargs": {"archive_type": "bz2"}, "num": 5, "time_standard": 0.68},
+ {"func": self.testPackArchive, "kwargs": {"archive_type": "xz"}, "num": 5, "time_standard": 0.47},
+ {"func": self.testUnpackZip, "num": 20, "time_standard": 0.25},
+ {"func": self.testUnpackArchive, "kwargs": {"archive_type": "gz"}, "num": 20, "time_standard": 0.28},
+ {"func": self.testUnpackArchive, "kwargs": {"archive_type": "bz2"}, "num": 20, "time_standard": 0.83},
+ {"func": self.testUnpackArchive, "kwargs": {"archive_type": "xz"}, "num": 20, "time_standard": 0.38},
+
+ {"func": self.testCryptHash, "kwargs": {"hash_type": "sha256"}, "num": 10, "time_standard": 0.50},
+ {"func": self.testCryptHash, "kwargs": {"hash_type": "sha512"}, "num": 10, "time_standard": 0.33},
+ {"func": self.testCryptHashlib, "kwargs": {"hash_type": "sha3_256"}, "num": 10, "time_standard": 0.33},
+ {"func": self.testCryptHashlib, "kwargs": {"hash_type": "sha3_512"}, "num": 10, "time_standard": 0.65},
+
+ {"func": self.testRandom, "num": 100, "time_standard": 0.08},
+ ]
+ if online:
+ tests += [
+ {"func": self.testHttps, "num": 1, "time_standard": 2.1}
+ ]
+ return tests
+
+ def testBenchmark(self, num_multipler=1, online=False, num_run=None, filter=None):
+ """
+ Run benchmark on client functions
+ """
+ tests = self.getBenchmarkTests(online=online)
+
+ if filter:
+ tests = [test for test in tests[:] if filter.lower() in test["func"].__name__.lower()]
+
+ yield "\n"
+ res = {}
+ multiplers = []
+ for test in tests:
+ s = time.time()
+ if num_run:
+ num_run_test = num_run
+ else:
+ num_run_test = math.ceil(test["num"] * num_multipler)
+ func = test["func"]
+ func_name = func.__name__
+ kwargs = test.get("kwargs", {})
+ key = "%s %s" % (func_name, kwargs)
+ if kwargs:
+ yield "* Running %s (%s) x %s " % (func_name, kwargs, num_run_test)
+ else:
+ yield "* Running %s x %s " % (func_name, num_run_test)
+ i = 0
+ try:
+ for progress in func(num_run_test, **kwargs):
+ i += 1
+ if num_run_test > 10:
+ should_print = i % (num_run_test / 10) == 0 or progress != "."
+ else:
+ should_print = True
+
+ if should_print:
+ if num_run_test == 1 and progress == ".":
+ progress = "..."
+ yield progress
+ time_taken = time.time() - s
+ if num_run:
+ time_standard = 0
+ else:
+ time_standard = test["time_standard"] * num_multipler
+ yield self.formatResult(time_taken, time_standard)
+ yield "\n"
+ res[key] = "ok"
+ multiplers.append(time_standard / time_taken)
+ except Exception as err:
+ res[key] = err
+ yield "Failed!\n! Error: %s\n\n" % Debug.formatException(err)
+
+ if not res:
+ yield "! No tests found"
+ else:
+ yield "* Result:\n"
+ yield " - Total: %s tests\n" % len(res)
+ yield " - Success: %s tests\n" % len([res_key for res_key, res_val in res.items() if res_val == "ok"])
+ yield " - Failed: %s tests\n" % len([res_key for res_key, res_val in res.items() if res_val != "ok"])
+ if multiplers:
+ multipler_avg = sum(multiplers) / len(multiplers)
+ multipler_title = self.getMultiplerTitle(multipler_avg)
+ yield " - Average speed factor: %.2fx (%s)" % (multipler_avg, multipler_title)
+
+ def testHttps(self, num_run=1):
+ """
+ Test https connection with valid and invalid certs
+ """
+ import urllib.request
+ import urllib.error
+
+ body = urllib.request.urlopen("https://google.com").read()
+ assert len(body) > 100
+ yield "."
+
+ badssl_urls = [
+ "https://expired.badssl.com/",
+ "https://wrong.host.badssl.com/",
+ "https://self-signed.badssl.com/",
+ "https://untrusted-root.badssl.com/"
+ ]
+ for badssl_url in badssl_urls:
+ try:
+ body = urllib.request.urlopen(badssl_url).read()
+ https_err = None
+ except urllib.error.URLError as err:
+ https_err = err
+ assert https_err
+ yield "."
+
+ def testCryptHash(self, num_run=1, hash_type="sha256"):
+ """
+ Test hashing functions
+ """
+ yield "(5MB) "
+
+ from Crypt import CryptHash
+
+ hash_types = {
+ "sha256": {"func": CryptHash.sha256sum, "hash_valid": "8cd629d9d6aff6590da8b80782a5046d2673d5917b99d5603c3dcb4005c45ffa"},
+ "sha512": {"func": CryptHash.sha512sum, "hash_valid": "9ca7e855d430964d5b55b114e95c6bbb114a6d478f6485df93044d87b108904d"}
+ }
+ hash_func = hash_types[hash_type]["func"]
+ hash_valid = hash_types[hash_type]["hash_valid"]
+
+ data = io.BytesIO(b"Hello" * 1024 * 1024) # 5MB
+ for i in range(num_run):
+ data.seek(0)
+ hash = hash_func(data)
+ yield "."
+ assert hash == hash_valid, "%s != %s" % (hash, hash_valid)
+
+ def testCryptHashlib(self, num_run=1, hash_type="sha3_256"):
+ """
+ Test SHA3 hashing functions
+ """
+ yield "x 5MB "
+
+ hash_types = {
+ "sha3_256": {"func": hashlib.sha3_256, "hash_valid": "c8aeb3ef9fe5d6404871c0d2a4410a4d4e23268e06735648c9596f436c495f7e"},
+ "sha3_512": {"func": hashlib.sha3_512, "hash_valid": "b75dba9472d8af3cc945ce49073f3f8214d7ac12086c0453fb08944823dee1ae83b3ffbc87a53a57cc454521d6a26fe73ff0f3be38dddf3f7de5d7692ebc7f95"},
+ }
+
+ hash_func = hash_types[hash_type]["func"]
+ hash_valid = hash_types[hash_type]["hash_valid"]
+
+ data = io.BytesIO(b"Hello" * 1024 * 1024) # 5MB
+ for i in range(num_run):
+ data.seek(0)
+ h = hash_func()
+ while 1:
+ buff = data.read(1024 * 64)
+ if not buff:
+ break
+ h.update(buff)
+ hash = h.hexdigest()
+ yield "."
+ assert hash == hash_valid, "%s != %s" % (hash, hash_valid)
+
+ def testRandom(self, num_run=1):
+ """
+ Test generating random data
+ """
+ yield "x 1000 x 256 bytes "
+ for i in range(num_run):
+ data_last = None
+ for y in range(1000):
+ data = os.urandom(256)
+ assert data != data_last
+ assert len(data) == 256
+ data_last = data
+ yield "."
+
+ def testHdPrivatekey(self, num_run=2):
+ """
+ Test generating deterministic private keys from a master seed
+ """
+ from Crypt import CryptBitcoin
+ seed = "e180efa477c63b0f2757eac7b1cce781877177fe0966be62754ffd4c8592ce38"
+ privatekeys = []
+ for i in range(num_run):
+ privatekeys.append(CryptBitcoin.hdPrivatekey(seed, i * 10))
+ yield "."
+ valid = "5JSbeF5PevdrsYjunqpg7kAGbnCVYa1T4APSL3QRu8EoAmXRc7Y"
+ assert privatekeys[0] == valid, "%s != %s" % (privatekeys[0], valid)
+ if len(privatekeys) > 1:
+ assert privatekeys[0] != privatekeys[-1]
+
+ def testSign(self, num_run=1):
+ """
+ Test signing data using a private key
+ """
+ from Crypt import CryptBitcoin
+ data = "Hello" * 1024
+ privatekey = "5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk"
+ for i in range(num_run):
+ yield "."
+ sign = CryptBitcoin.sign(data, privatekey)
+ valid = "G1GXaDauZ8vX/N9Jn+MRiGm9h+I94zUhDnNYFaqMGuOiBHB+kp4cRPZOL7l1yqK5BHa6J+W97bMjvTXtxzljp6w="
+ assert sign == valid, "%s != %s" % (sign, valid)
+
+ def testVerify(self, num_run=1, lib_verify="btctools"):
+ """
+ Test verification of generated signatures
+ """
+ from Crypt import CryptBitcoin
+ CryptBitcoin.loadLib(lib_verify, silent=True)
+
+ data = "Hello" * 1024
+ privatekey = "5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk"
+ address = CryptBitcoin.privatekeyToAddress(privatekey)
+ sign = "G1GXaDauZ8vX/N9Jn+MRiGm9h+I94zUhDnNYFaqMGuOiBHB+kp4cRPZOL7l1yqK5BHa6J+W97bMjvTXtxzljp6w="
+
+ for i in range(num_run):
+ ok = CryptBitcoin.verify(data, address, sign, lib_verify=lib_verify)
+ yield "."
+ assert ok, "does not verify from %s" % address
+
+ def testAll(self):
+ """
+ Run all tests to check system compatibility with ZeroNet functions
+ """
+ for progress in self.testBenchmark(online=not config.offline, num_run=1):
+ yield progress
+
+
+@PluginManager.registerTo("ConfigPlugin")
+class ConfigPlugin(object):
+ def createArguments(self):
+ back = super(ConfigPlugin, self).createArguments()
+ if self.getCmdlineValue("test") == "benchmark":
+ self.test_parser.add_argument(
+ '--num_multipler', help='Benchmark run time multipler',
+ default=1.0, type=float, metavar='num'
+ )
+ self.test_parser.add_argument(
+ '--filter', help='Filter running benchmark',
+ default=None, metavar='test name'
+ )
+ return back
diff --git a/plugins/Benchmark/__init__.py b/plugins/Benchmark/__init__.py
new file mode 100644
index 00000000..76a5ae9c
--- /dev/null
+++ b/plugins/Benchmark/__init__.py
@@ -0,0 +1,3 @@
+from . import BenchmarkPlugin
+from . import BenchmarkDb
+from . import BenchmarkPack
diff --git a/plugins/Benchmark/media/benchmark.html b/plugins/Benchmark/media/benchmark.html
new file mode 100644
index 00000000..1d63cf86
--- /dev/null
+++ b/plugins/Benchmark/media/benchmark.html
@@ -0,0 +1,123 @@
+
+
+