rev280, The whole project reformatted to PEP8, UiRequest getPosted to query posted variables
This commit is contained in:
parent
a5741704e4
commit
b5ecb62bc6
49 changed files with 5704 additions and 5205 deletions
|
@ -1,47 +1,51 @@
|
|||
import sys, os, traceback
|
||||
import sys
|
||||
import os
|
||||
import traceback
|
||||
|
||||
|
||||
# Non fatal exception
class Notify(Exception):
    """Non-fatal exception carrying a user-facing message.

    formatException() returns these as-is instead of building a
    traceback summary, and the excepthooks skip logging them.
    """

    def __init__(self, message):
        # Keep the raw message; __str__ hands it back verbatim.
        self.message = message

    def __str__(self):
        return self.message
|
||||
|
||||
|
||||
def formatException(err=None):
    """Return a one-line summary of the active exception.

    err: optional message override. A Notify instance is returned
         unchanged (non-fatal, no traceback wanted).
    Reads sys.exc_info(), so it must run inside (or straight after)
    an except block to have a traceback to format.
    Returns "ExcType: message in file line N > file line M".
    """
    if isinstance(err, Notify):  # was: type(err) == Notify; accept subclasses too
        return err
    exc_type, exc_obj, exc_tb = sys.exc_info()
    if not err:
        err = exc_obj.message  # Python 2 exception attribute
    tb = []
    for frame in traceback.extract_tb(exc_tb):
        path, line, function, text = frame
        file_name = os.path.split(path)[1]  # renamed: `file` shadows a builtin
        tb.append("%s line %s" % (file_name, line))
    return "%s: %s in %s" % (exc_type.__name__, err, " > ".join(tb))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
print 1 / 0
|
||||
except Exception, err:
|
||||
print type(err).__name__
|
||||
print "1/0 error: %s" % formatException(err)
|
||||
|
||||
try:
|
||||
print 1/0
|
||||
except Exception, err:
|
||||
print type(err).__name__
|
||||
print "1/0 error: %s" % formatException(err)
|
||||
|
||||
def loadJson():
|
||||
json.loads("Errr")
|
||||
def loadJson():
|
||||
json.loads("Errr")
|
||||
|
||||
import json
|
||||
try:
|
||||
loadJson()
|
||||
except Exception, err:
|
||||
print err
|
||||
print "Json load error: %s" % formatException(err)
|
||||
import json
|
||||
try:
|
||||
loadJson()
|
||||
except Exception, err:
|
||||
print err
|
||||
print "Json load error: %s" % formatException(err)
|
||||
|
||||
try:
|
||||
raise Notify("nothing...")
|
||||
except Exception, err:
|
||||
print "Notify: %s" % formatException(err)
|
||||
try:
|
||||
raise Notify("nothing...")
|
||||
except Exception, err:
|
||||
print "Notify: %s" % formatException(err)
|
||||
|
||||
loadJson()
|
||||
loadJson()
|
||||
|
|
|
@ -1,55 +1,64 @@
|
|||
import gevent, sys, logging
|
||||
import sys
|
||||
import logging
|
||||
|
||||
import gevent
|
||||
|
||||
from Config import config
|
||||
|
||||
last_error = None
|
||||
|
||||
|
||||
# Store last error, ignore notify, allow manual error logging
def handleError(*args):
    """sys.excepthook replacement that remembers the last real error.

    Called with no args (manual logging) it reads sys.exc_info() and
    stays silent; called by the interpreter with (type, value, tb) it
    also logs and delegates to the original excepthook. Exceptions
    whose type is named "Notify" are ignored entirely.
    """
    global last_error
    silent = not args
    if silent:  # Manual call: pick up the active exception
        args = sys.exc_info()
    is_notify = args[0].__name__ == "Notify"
    if not is_notify:
        last_error = args
        if not silent:
            logging.exception("Unhandled exception")
            sys.__excepthook__(*args)
|
||||
|
||||
|
||||
# Ignore notify errors
def handleErrorNotify(*args):
    """Production excepthook: log everything except Notify errors."""
    if args[0].__name__ == "Notify":
        return  # Non-fatal, deliberately swallowed
    logging.exception("Unhandled exception")
    sys.__excepthook__(*args)
|
||||
|
||||
|
||||
OriginalGreenlet = gevent.Greenlet


class ErrorhookedGreenlet(OriginalGreenlet):
    """Greenlet that routes uncaught errors into sys.excepthook."""

    def _report_error(self, exc_info):
        # exc_info is the standard (type, value, traceback) triple.
        sys.excepthook(*exc_info)
|
||||
|
||||
# Install the process-wide excepthook: in debug mode remember the last
# error (handleError stores it for later inspection), otherwise only
# filter out Notify errors before logging.
if config.debug:
    sys.excepthook = handleError
else:
    sys.excepthook = handleErrorNotify

# Route every spawned greenlet's uncaught error through the hook above.
# NOTE(review): reload(gevent) re-executes gevent/__init__ — confirm the
# patched Greenlet class survives the reload as intended.
gevent.Greenlet = gevent.greenlet.Greenlet = ErrorhookedGreenlet
reload(gevent)
|
||||
|
||||
if __name__ == "__main__":
|
||||
import time
|
||||
from gevent import monkey; monkey.patch_all(thread=False, ssl=False)
|
||||
import Debug
|
||||
def sleeper():
|
||||
print "started"
|
||||
time.sleep(3)
|
||||
print "stopped"
|
||||
thread1 = gevent.spawn(sleeper)
|
||||
thread2 = gevent.spawn(sleeper)
|
||||
time.sleep(1)
|
||||
print "killing..."
|
||||
thread1.throw(Exception("Hello"))
|
||||
thread2.throw(Debug.Notify("Throw"))
|
||||
print "killed"
|
||||
import time
|
||||
from gevent import monkey
|
||||
monkey.patch_all(thread=False, ssl=False)
|
||||
import Debug
|
||||
|
||||
def sleeper():
|
||||
print "started"
|
||||
time.sleep(3)
|
||||
print "stopped"
|
||||
thread1 = gevent.spawn(sleeper)
|
||||
thread2 = gevent.spawn(sleeper)
|
||||
time.sleep(1)
|
||||
print "killing..."
|
||||
thread1.throw(Exception("Hello"))
|
||||
thread2.throw(Debug.Notify("Throw"))
|
||||
print "killed"
|
||||
|
|
|
@ -1,81 +1,91 @@
|
|||
import os, subprocess, re, logging, time
|
||||
import os
|
||||
import subprocess
|
||||
import re
|
||||
import logging
|
||||
import time
|
||||
|
||||
from Config import config
|
||||
|
||||
|
||||
# Find files with extension in path
def findfiles(path, find_ext):
    """Yield file paths (with "/" separators) under `path` whose
    extension is listed in `find_ext`, skipping already-merged
    "all.*" outputs.

    Files are yielded in sorted order per directory; directories are
    walked bottom-up (topdown=False).
    """
    for root, dirs, files in os.walk(path, topdown=False):
        for file_name in sorted(files):  # renamed: `file` shadows a builtin
            file_path = root + "/" + file_name
            file_ext = file_name.split(".")[-1]
            if file_ext in find_ext and not file_name.startswith("all."):
                yield file_path.replace("\\", "/")
|
||||
|
||||
|
||||
# Generates: all.js: merge *.js, compile coffeescript, all.css: merge *.css, vendor prefix features
def merge(merged_path):
    """Rebuild `merged_path` from the source files in its directory.

    For *.js outputs, *.coffee sources are compiled (via the external
    config.coffeescript_compiler command); for *.css outputs the merged
    text is vendor-prefixed. Each part is wrapped in a
    "/* ---- path ---- */" marker so unchanged compiled parts can be
    reused from the previous output. Returns None normally, False when
    a coffee file needs compiling but no compiler is configured.
    Does nothing if no source is newer than the merged file.
    """
    merge_dir = os.path.dirname(merged_path)
    s = time.time()  # reassigned per coffee compile below
    ext = merged_path.split(".")[-1]
    if ext == "js":  # If merging .js find .coffee too
        find_ext = ["js", "coffee"]
    else:
        find_ext = [ext]

    # If exist check the other files modification date
    if os.path.isfile(merged_path):
        merged_mtime = os.path.getmtime(merged_path)
    else:
        merged_mtime = 0

    changed = {}
    for file_path in findfiles(merge_dir, find_ext):
        if os.path.getmtime(file_path) > merged_mtime:
            changed[file_path] = True
    if not changed:
        return  # Assets not changed, nothing to do

    # FIX: always define old_parts; it was unbound when the merged file
    # did not exist but a cached part was later consulted.
    old_parts = {}
    if os.path.isfile(merged_path):  # Find old parts to avoid unncessary recompile
        merged_old = open(merged_path, "rb").read().decode("utf8")
        for match in re.findall(r"(/\* ---- (.*?) ---- \*/(.*?)(?=/\* ----|$))", merged_old, re.DOTALL):
            old_parts[match[1]] = match[2].strip("\n\r")

    # Merge files
    parts = []
    s_total = time.time()
    for file_path in findfiles(merge_dir, find_ext):
        parts.append("\n\n/* ---- %s ---- */\n\n" % file_path)
        if file_path.endswith(".coffee"):  # Compile coffee script
            if file_path in changed or file_path not in old_parts:  # Only recompile if changed or its not compiled before
                if not config.coffeescript_compiler:
                    logging.error("No coffeescript compiler definied, skipping compiling %s" % merged_path)
                    return False  # No coffeescript compiler, skip this file
                command = config.coffeescript_compiler % os.path.join(*file_path.split("/"))  # Fix os path separator
                s = time.time()
                compiler = subprocess.Popen(command, shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
                out = compiler.stdout.read().decode("utf8")
                logging.debug("Running: %s (Done in %.2fs)" % (command, time.time() - s))
                if out and out.startswith("("):  # Compiler output starts with "(" on success
                    parts.append(out)
                else:
                    # Surface the compile error in the merged output itself
                    error = out
                    logging.error("%s Compile error: %s" % (file_path, error))
                    parts.append(
                        "alert('%s compile error: %s');" %
                        (file_path, re.escape(error).replace("\n", "\\n").replace(r"\\n", r"\n"))
                    )
            else:  # Not changed use the old_part
                parts.append(old_parts[file_path])
        else:  # Add to parts
            parts.append(open(file_path).read().decode("utf8"))

    merged = u"\n".join(parts)
    if ext == "css":  # Vendor prefix css
        from lib.cssvendor import cssvendor
        merged = cssvendor.prefix(merged)
    merged = merged.replace("\r", "")
    open(merged_path, "wb").write(merged.encode("utf8"))
    logging.debug("Merged %s (%.2fs)" % (merged_path, time.time() - s_total))
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Manual test: rebuild one site's merged js with a Windows-style
    # coffee compiler command, logging every step.
    logging.getLogger().setLevel(logging.DEBUG)
    os.chdir("..")
    config.coffeescript_compiler = r'type "%s" | tools\coffee-node\bin\node.exe tools\coffee-node\bin\coffee --no-header -s -p'
    merge("data/12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH/js/all.js")
|
||||
|
|
|
@ -1,42 +1,44 @@
|
|||
import logging, os, sys, time
|
||||
import logging
|
||||
import time
|
||||
import threading
|
||||
|
||||
from Config import config
|
||||
|
||||
if config.debug:  # Only load pyfilesytem if using debug mode
    try:
        from fs.osfs import OSFS
        pyfilesystem = OSFS("src")
        pyfilesystem_plugins = OSFS("plugins")
        logging.debug("Pyfilesystem detected, source code autoreload enabled")
    except Exception, err:
        # pyfilesystem missing/broken: autoreload silently disabled.
        # NOTE(review): pyfilesystem_plugins stays undefined on this
        # path — confirm nothing reads it when pyfilesystem is False.
        pyfilesystem = False
else:
    pyfilesystem = False
|
||||
|
||||
|
||||
class DebugReloader:
    # Watches src/ and plugins/ through pyfilesystem and fires `callback`
    # when a watched source file changes (debug-mode live reload).
    # No-op when pyfilesystem could not be loaded.

    def __init__(self, callback, directory="/"):
        # NOTE(review): "last_chaged" is a typo for last_changed; kept
        # as-is since other code may reference the attribute name.
        self.last_chaged = 0
        if pyfilesystem:
            self.directory = directory
            self.callback = callback
            logging.debug("Adding autoreload: %s, cb: %s" % (directory, callback))
            # Register the watcher off-thread so __init__ returns fast.
            thread = threading.Thread(target=self.addWatcher)
            thread.daemon = True  # Don't keep the process alive on exit
            thread.start()

    def addWatcher(self, recursive=True):
        # Attach change watchers to both source trees; best-effort only.
        try:
            time.sleep(1)  # Wait for .pyc compiles
            pyfilesystem.add_watcher(self.changed, path=self.directory, events=None, recursive=recursive)
            pyfilesystem_plugins.add_watcher(self.changed, path=self.directory, events=None, recursive=recursive)
        except Exception, err:
            print "File system watcher failed: %s (on linux pyinotify not gevent compatible yet :( )" % err

    def changed(self, evt):
        # Filter: no path, data-dir changes, *.pyc artifacts, or a second
        # event within 1s of the last reload are all ignored.
        if not evt.path or "%s/" % config.data_dir in evt.path or evt.path.endswith("pyc") or time.time() - self.last_chaged < 1:
            return False  # Ignore *.pyc changes and no reload within 1 sec
        time.sleep(0.1)  # Wait for lock release
        self.callback()
        self.last_chaged = time.time()
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue