Change to Python3 coding style
parent fc0fe0557b
commit b0b9a4d33c
137 changed files with 910 additions and 913 deletions
@@ -7,6 +7,7 @@ import random
 import sys
 import hashlib
 import collections
+import base64

 import gevent
 import gevent.pool
@@ -17,14 +18,14 @@ from Peer import Peer
 from Worker import WorkerManager
 from Debug import Debug
 from Content import ContentManager
-from SiteStorage import SiteStorage
+from .SiteStorage import SiteStorage
 from Crypt import CryptHash
 from util import helper
 from util import Diff
 from Plugin import PluginManager
 from File import FileServer
-from SiteAnnouncer import SiteAnnouncer
-import SiteManager
+from .SiteAnnouncer import SiteAnnouncer
+from . import SiteManager


 @PluginManager.acceptPlugins
@@ -32,7 +33,8 @@ class Site(object):

     def __init__(self, address, allow_create=True, settings=None):
         self.address = str(re.sub("[^A-Za-z0-9]", "", address))  # Make sure its correct address
-        self.address_hash = hashlib.sha256(self.address).digest()
+        self.address_hash = hashlib.sha256(self.address.encode("ascii")).digest()
+        self.address_sha1 = hashlib.sha1(self.address.encode("ascii")).digest()
         self.address_short = "%s..%s" % (self.address[:6], self.address[-4:])  # Short address for logging
         self.log = logging.getLogger("Site:%s" % self.address_short)
         self.addEventListeners()
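Note on this hunk: Python 3's hashlib digests only accept bytes, not str, which is why the address is passed through .encode("ascii") before hashing. A minimal standalone sketch of the difference (the address value below is made up for illustration):

    import hashlib

    address = "1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D"  # hypothetical ZeroNet-style address

    # Python 2 accepted hashlib.sha256(address) on a str; Python 3 raises
    # TypeError for str input, so the value is encoded to bytes first.
    address_hash = hashlib.sha256(address.encode("ascii")).digest()
    address_sha1 = hashlib.sha1(address.encode("ascii")).digest()

    print(len(address_hash), len(address_sha1))  # 32-byte and 20-byte digests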
@@ -127,7 +129,7 @@ class Site(object):
     def getSettingsCache(self):
         back = {}
         back["bad_files"] = self.bad_files
-        back["hashfield"] = self.content_manager.hashfield.tostring().encode("base64")
+        back["hashfield"] = base64.b64encode(self.content_manager.hashfield.tobytes()).decode("ascii")
         return back

     # Max site size in MB
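Note: the str.encode("base64") codec shortcut is gone in Python 3, and the hashfield array now uses the tobytes() spelling instead of tostring(), so the settings cache goes through the base64 module. A rough sketch of the same round trip, with a plain bytes value standing in for the real hashfield:

    import base64

    hashfield_bytes = bytes([1, 2, 3, 250])  # stand-in for hashfield.tobytes()

    # Python 2: hashfield.tostring().encode("base64")
    # Python 3: base64-encode the bytes, then decode to a JSON-friendly str
    encoded = base64.b64encode(hashfield_bytes).decode("ascii")
    decoded = base64.b64decode(encoded)

    assert decoded == hashfield_bytes
    print(encoded)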
@@ -173,7 +175,7 @@ class Site(object):
         # Start download files
         file_threads = []
         if download_files:
-            for file_relative_path in self.content_manager.contents[inner_path].get("files", {}).keys():
+            for file_relative_path in list(self.content_manager.contents[inner_path].get("files", {}).keys()):
                 file_inner_path = content_inner_dir + file_relative_path

                 # Try to diff first
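Note: in Python 3, dict.keys() returns a live view rather than a list, so loops that can mutate the dict (or yield to other greenlets that do) wrap it in list() to iterate over a snapshot. A small self-contained illustration of the failure mode the copy avoids, using a plain dict rather than the real contents mapping:

    files = {"index.html": 1, "style.css": 2}

    # Mutating a dict while iterating over its live view raises RuntimeError in Python 3
    try:
        for name in files.keys():
            files["added-" + name] = 3
    except RuntimeError as err:
        print("live view:", err)

    # Iterating over a list() snapshot is safe even if the dict changes meanwhile
    for name in list(files.keys()):
        files.setdefault("copy-" + name, 4)
    print(sorted(files))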
@@ -204,7 +206,7 @@ class Site(object):
                             "Patched successfully: %s (diff: %.3fs, verify: %.3fs, write: %.3fs, on_done: %.3fs)" %
                             (file_inner_path, time_diff, time_verify, time_write, time_on_done)
                         )
-                    except Exception, err:
+                    except Exception as err:
                         self.log.debug("Failed to patch %s: %s" % (file_inner_path, err))
                         diff_success = False

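Note: `except Exception, err` is Python 2-only syntax and no longer parses under Python 3; the `as err` form used throughout this commit is valid on both. A trivial sketch with a made-up error:

    try:
        raise ValueError("diff could not be applied")  # fabricated error for illustration
    except Exception as err:  # "except Exception, err" would be a SyntaxError on Python 3
        print("Failed to patch: %s" % err)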
@@ -218,7 +220,7 @@ class Site(object):
             if inner_path == "content.json":
                 gevent.spawn(self.updateHashfield)

-            for file_relative_path in self.content_manager.contents[inner_path].get("files_optional", {}).keys():
+            for file_relative_path in list(self.content_manager.contents[inner_path].get("files_optional", {}).keys()):
                 file_inner_path = content_inner_dir + file_relative_path
                 if file_inner_path not in changed and not self.bad_files.get(file_inner_path):
                     continue
@@ -233,7 +235,7 @@ class Site(object):

         # Wait for includes download
         include_threads = []
-        for file_relative_path in self.content_manager.contents[inner_path].get("includes", {}).keys():
+        for file_relative_path in list(self.content_manager.contents[inner_path].get("includes", {}).keys()):
             file_inner_path = content_inner_dir + file_relative_path
             include_thread = gevent.spawn(self.downloadContent, file_inner_path, download_files=download_files, peer=peer)
             include_threads.append(include_thread)
@@ -262,7 +264,7 @@ class Site(object):
     def getReachableBadFiles(self):
         if not self.bad_files:
             return False
-        return [bad_file for bad_file, retry in self.bad_files.iteritems() if retry < 3]
+        return [bad_file for bad_file, retry in self.bad_files.items() if retry < 3]

     # Retry download bad files
     def retryBadFiles(self, force=False):
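Note: dict.iteritems(), iterkeys() and itervalues() were removed in Python 3; items() now returns a lightweight view, so it is a drop-in replacement wherever the code only iterates. A minimal sketch with a stand-in bad_files dict:

    bad_files = {"content.json": 1, "data/users.json": 5, "index.html": 2}

    # Python 2: bad_files.iteritems(); Python 3: items() already iterates lazily
    reachable = [path for path, retry in bad_files.items() if retry < 3]
    print(reachable)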
@@ -272,7 +274,7 @@ class Site(object):
         content_inner_paths = []
         file_inner_paths = []

-        for bad_file, tries in self.bad_files.items():
+        for bad_file, tries in list(self.bad_files.items()):
             if force or random.randint(0, min(40, tries)) < 4:  # Larger number tries = less likely to check every 15min
                 if bad_file.endswith("content.json"):
                     content_inner_paths.append(bad_file)
@@ -286,7 +288,7 @@ class Site(object):
             self.pooledDownloadFile(file_inner_paths, only_if_bad=True)

     def checkBadFiles(self):
-        for bad_file in self.bad_files.keys():
+        for bad_file in list(self.bad_files.keys()):
             file_info = self.content_manager.getFileInfo(bad_file)
             if bad_file.endswith("content.json"):
                 if file_info is False and bad_file != "content.json":
@@ -374,7 +376,7 @@ class Site(object):
             queried.append(peer)
             modified_contents = []
             my_modified = self.content_manager.listModified(since)
-            for inner_path, modified in res["modified_files"].iteritems():  # Check if the peer has newer files than we
+            for inner_path, modified in res["modified_files"].items():  # Check if the peer has newer files than we
                 has_newer = int(modified) > my_modified.get(inner_path, 0)
                 has_older = int(modified) < my_modified.get(inner_path, 0)
                 if inner_path not in self.bad_files and not self.content_manager.isArchived(inner_path, modified):
@@ -480,7 +482,7 @@ class Site(object):
     def redownloadContents(self):
         # Download all content.json again
         content_threads = []
-        for inner_path in self.content_manager.contents.keys():
+        for inner_path in list(self.content_manager.contents.keys()):
             content_threads.append(self.needFile(inner_path, update=True, blocking=False))

         self.log.debug("Waiting %s content.json to finish..." % len(content_threads))
@@ -523,7 +525,7 @@ class Site(object):
                 })
                 if result:
                     break
-            except Exception, err:
+            except Exception as err:
                 self.log.error("Publish error: %s" % Debug.formatException(err))
                 result = {"exception": Debug.formatException(err)}

@@ -563,7 +565,7 @@ class Site(object):
         peers = set(peers)

         self.log.info("Publishing %s to %s/%s peers (connected: %s) diffs: %s (%.2fk)..." % (
-            inner_path, limit, len(self.peers), num_connected_peers, diffs.keys(), float(len(str(diffs))) / 1024
+            inner_path, limit, len(self.peers), num_connected_peers, list(diffs.keys()), float(len(str(diffs))) / 1024
         ))

         if not peers:
@@ -631,8 +633,8 @@ class Site(object):
         )

         # Copy files
-        for content_inner_path, content in self.content_manager.contents.items():
-            file_relative_paths = content.get("files", {}).keys()
+        for content_inner_path, content in list(self.content_manager.contents.items()):
+            file_relative_paths = list(content.get("files", {}).keys())

             # Sign content.json at the end to make sure every file is included
             file_relative_paths.sort()
@@ -812,7 +814,7 @@ class Site(object):
         self.log.debug("Need connections: %s, Current: %s, Total: %s" % (need, connected, len(self.peers)))

         if connected < need:  # Need more than we have
-            for peer in self.peers.values():
+            for peer in list(self.peers.values()):
                 if not peer.connection or not peer.connection.connected:  # No peer connection or disconnected
                     peer.pex()  # Initiate peer exchange
                 if peer.connection and peer.connection.connected:
@@ -831,7 +833,7 @@ class Site(object):

     # Return: Probably peers verified to be connectable recently
     def getConnectablePeers(self, need_num=5, ignore=[], allow_private=True):
-        peers = self.peers.values()
+        peers = list(self.peers.values())
         found = []
         for peer in peers:
             if peer.key.endswith(":0"):
@@ -874,7 +876,7 @@ class Site(object):
         # Add random peers
         need_more = need_num - len(found)
         found_more = sorted(
-            self.peers.values()[0:need_more * 50],
+            list(self.peers.values())[0:need_more * 50],
             key=lambda peer: peer.reputation,
             reverse=True
         )[0:need_more * 2]
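Note: the dict_values view returned by Python 3's values() cannot be indexed or sliced the way a Python 2 list could, hence the list() copy before taking the [0:need_more * 50] window. A short sketch with a throwaway peers dict:

    peers = {"1.2.3.4:15441": "peer1", "5.6.7.8:15441": "peer2", "9.9.9.9:15441": "peer3"}

    try:
        peers.values()[0:2]  # worked in Python 2, TypeError in Python 3
    except TypeError as err:
        print("view is not subscriptable:", err)

    first_two = list(peers.values())[0:2]  # Python 3: copy to a list, then slice
    print(first_two)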
@@ -906,7 +908,7 @@ class Site(object):

     # Cleanup probably dead peers and close connection if too much
     def cleanupPeers(self, peers_protected=[]):
-        peers = self.peers.values()
+        peers = list(self.peers.values())
         if len(peers) > 20:
             # Cleanup old peers
             removed = 0
@@ -1019,7 +1021,7 @@ class Site(object):
     # Send site status update to websocket clients
     def updateWebsocket(self, **kwargs):
         if kwargs:
-            param = {"event": kwargs.items()[0]}
+            param = {"event": list(kwargs.items())[0]}
         else:
             param = None
         for ws in self.websockets:
@@ -1,17 +1,16 @@
 import random
 import time
 import hashlib
-import urllib
-import urllib2
+import urllib.request
 import struct
 import socket
 import re
 import collections

-from lib import bencode
+import bencode
 from lib.subtl.subtl import UdpTrackerClient
-from lib.PySocks import socks
-from lib.PySocks import sockshandler
+import socks
+import sockshandler
 import gevent

 from Plugin import PluginManager
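Note: urllib2 does not exist in Python 3; urlopen and the error classes moved to urllib.request and urllib.error. The sketch below only shows the general shape of an HTTP request under the new module layout; the URL is a placeholder, not a real tracker used by this code:

    import urllib.request
    import urllib.error

    url = "http://tracker.example.org/announce"  # placeholder tracker URL

    try:
        # Python 2: urllib2.urlopen(...); Python 3: urllib.request.urlopen(...)
        with urllib.request.urlopen(url, timeout=10) as response:
            print(len(response.read()))
    except urllib.error.URLError as err:
        print("announce failed:", err)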
@@ -69,7 +68,7 @@ class SiteAnnouncer(object):
         back = []
         # Type of addresses they can reach me
         if config.trackers_proxy == "disable":
-            for ip_type, opened in self.site.connection_server.port_opened.items():
+            for ip_type, opened in list(self.site.connection_server.port_opened.items()):
                 if opened:
                     back.append(ip_type)
         if self.site.connection_server.tor_manager.start_onions:
@@ -221,7 +220,7 @@ class SiteAnnouncer(object):
         if error:
             self.stats[tracker]["status"] = "error"
             self.stats[tracker]["time_status"] = time.time()
-            self.stats[tracker]["last_error"] = str(err).decode("utf8", "ignore")
+            self.stats[tracker]["last_error"] = str(error)
             self.stats[tracker]["time_last_error"] = time.time()
             self.stats[tracker]["num_error"] += 1
         self.stats[tracker]["num_request"] += 1
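Note: Python 3 str objects have no .decode() method (only bytes do), so the old str(err).decode("utf8", "ignore") would raise AttributeError; str(error) is already unicode and can be stored directly. A minimal illustration:

    error = Exception("Connection refused")  # stand-in tracker error

    last_error = str(error)  # Python 3: already a unicode str
    try:
        str(error).decode("utf8")  # Python 2 idiom; AttributeError on Python 3
    except AttributeError as err:
        print("str has no decode():", err)

    print(last_error)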
@@ -359,9 +358,9 @@ class SiteAnnouncer(object):
         try:
             peer_data = bencode.decode(response)["peers"]
             response = None
-            peer_count = len(peer_data) / 6
+            peer_count = int(len(peer_data) / 6)
             peers = []
-            for peer_offset in xrange(peer_count):
+            for peer_offset in range(peer_count):
                 off = 6 * peer_offset
                 peer = peer_data[off:off + 6]
                 addr, port = struct.unpack('!LH', peer)
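Note: two Python 3 changes meet in this hunk: `/` is always true division, so len(peer_data) / 6 yields a float and has to be wrapped in int() (or written as //) before driving a loop, and xrange() is gone, with range() now lazy. A self-contained sketch over a fabricated compact peer list:

    import struct

    # Two fake compact peers: 4-byte IPv4 + 2-byte port, as BitTorrent trackers return them
    peer_data = struct.pack("!LH", 0x7F000001, 15441) + struct.pack("!LH", 0x0A000002, 15442)

    peer_count = int(len(peer_data) / 6)   # len(...) / 6 is a float on Python 3; // also works
    for peer_offset in range(peer_count):  # range() replaces xrange()
        off = 6 * peer_offset
        addr, port = struct.unpack("!LH", peer_data[off:off + 6])
        print(addr, port)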
@@ -379,7 +378,7 @@ class SiteAnnouncer(object):
         peers = self.site.getConnectedPeers()

         if len(peers) == 0:  # Small number of connected peers for this site, connect to any
-            peers = self.site.peers.values()
+            peers = list(self.site.peers.values())
             need_num = 10

         random.shuffle(peers)
|
|||
|
||||
def updateWebsocket(self, **kwargs):
|
||||
if kwargs:
|
||||
param = {"event": kwargs.items()[0]}
|
||||
param = {"event": list(kwargs.items())[0]}
|
||||
else:
|
||||
param = None
|
||||
|
||||
|
|
|
@@ -28,11 +28,11 @@ class SiteManager(object):
     def load(self, cleanup=True, startup=False):
         self.log.debug("Loading sites...")
         self.loaded = False
-        from Site import Site
+        from .Site import Site
         address_found = []
         added = 0
         # Load new adresses
-        for address, settings in json.load(open("%s/sites.json" % config.data_dir)).iteritems():
+        for address, settings in json.load(open("%s/sites.json" % config.data_dir)).items():
             if address not in self.sites:
                 if os.path.isfile("%s/%s/content.json" % (config.data_dir, address)):
                     # Root content.json exists, try load site
@@ -40,7 +40,7 @@ class SiteManager(object):
                     try:
                         site = Site(address, settings=settings)
                         site.content_manager.contents.get("content.json")
-                    except Exception, err:
+                    except Exception as err:
                         self.log.debug("Error loading site %s: %s" % (address, err))
                         continue
                     self.sites[address] = site
@@ -56,7 +56,7 @@ class SiteManager(object):

         # Remove deleted adresses
         if cleanup:
-            for address in self.sites.keys():
+            for address in list(self.sites.keys()):
                 if address not in address_found:
                     del(self.sites[address])
                     self.log.debug("Removed site: %s" % address)
@@ -93,7 +93,7 @@ class SiteManager(object):
         data = {}
         # Generate data file
         s = time.time()
-        for address, site in self.list().iteritems():
+        for address, site in self.list().items():
             if recalculate_size:
                 site.settings["size"], site.settings["size_optional"] = site.content_manager.getTotalSize()  # Update site size
             data[address] = site.settings
@@ -108,7 +108,7 @@ class SiteManager(object):
         time_write = time.time() - s

         # Remove cache from site settings
-        for address, site in self.list().iteritems():
+        for address, site in self.list().items():
             site.settings["cache"] = {}

         self.log.debug("Saved sites in %.2fs (generate: %.2fs, write: %.2fs)" % (time.time() - s, time_generate, time_write))
@@ -134,12 +134,12 @@ class SiteManager(object):

     # Return or create site and start download site files
     def need(self, address, all_file=True, settings=None):
-        from Site import Site
+        from .Site import Site
         site = self.get(address)
         if not site:  # Site not exist yet
             self.sites_changed = int(time.time())
             # Try to find site with differect case
-            for recover_address, recover_site in self.sites.items():
+            for recover_address, recover_site in list(self.sites.items()):
                 if recover_address.lower() == address.lower():
                     return recover_site

@@ -23,7 +23,7 @@ from Translate import translate as _
 class SiteStorage(object):
     def __init__(self, site, allow_create=True):
         self.site = site
-        self.directory = u"%s/%s" % (config.data_dir, self.site.address)  # Site data diretory
+        self.directory = "%s/%s" % (config.data_dir, self.site.address)  # Site data diretory
         self.allowed_dir = os.path.abspath(self.directory)  # Only serve file within this dir
         self.log = site.log
         self.db = None  # Db class
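Note: in Python 3 every string literal is already unicode, so the u"" prefix dropped here is redundant rather than wrong. A one-line check:

    # Both spellings produce the same unicode str type on Python 3
    assert u"%s/%s" % ("data", "site") == "%s/%s" % ("data", "site")
    print(type(u"data") is type("data"))  # True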
@@ -59,7 +59,7 @@ class SiteStorage(object):
     def getDbSchema(self):
         try:
             schema = self.loadJson("dbschema.json")
-        except Exception, err:
+        except Exception as err:
             raise Exception("dbschema.json is not a valid JSON: %s" % err)
         return schema

@@ -92,7 +92,7 @@ class SiteStorage(object):
     # Return possible db files for the site
     def getDbFiles(self):
         found = 0
-        for content_inner_path, content in self.site.content_manager.contents.iteritems():
+        for content_inner_path, content in self.site.content_manager.contents.items():
             # content.json file itself
             if self.isFile(content_inner_path):
                 yield content_inner_path, self.getPath(content_inner_path)
@@ -100,7 +100,7 @@ class SiteStorage(object):
                 self.log.error("[MISSING] %s" % content_inner_path)
             # Data files in content.json
             content_inner_path_dir = helper.getDirname(content_inner_path)  # Content.json dir relative to site
-            for file_relative_path in content.get("files", {}).keys() + content.get("files_optional", {}).keys():
+            for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()):
                 if not file_relative_path.endswith(".json") and not file_relative_path.endswith("json.gz"):
                     continue  # We only interesed in json files
                 file_inner_path = content_inner_path_dir + file_relative_path  # File Relative to site dir
@@ -181,7 +181,7 @@ class SiteStorage(object):
             self.event_db_busy.get()  # Wait for event
         try:
            res = self.getDb().execute(query, params)
-        except sqlite3.DatabaseError, err:
+        except sqlite3.DatabaseError as err:
             if err.__class__.__name__ == "DatabaseError":
                 self.log.error("Database error: %s, query: %s, try to rebuilding it..." % (err, query))
                 self.rebuildDb()
@@ -240,7 +240,7 @@ class SiteStorage(object):
                 os.rename(self.getPath(inner_path_before), self.getPath(inner_path_after))
                 err = None
                 break
-            except Exception, err:
+            except Exception as err:
                 self.log.error("%s rename error: %s (retry #%s)" % (inner_path_before, err, retry))
                 time.sleep(0.1 + retry)
         if err:
@@ -297,7 +297,7 @@ class SiteStorage(object):
             self.log.debug("Loading json file to db: %s (file: %s)" % (inner_path, file))
             try:
                 self.updateDbFile(inner_path, file)
-            except Exception, err:
+            except Exception as err:
                 self.log.error("Json %s load error: %s" % (inner_path, Debug.formatException(err)))
                 self.closeDb()

@@ -363,9 +363,9 @@ class SiteStorage(object):
             return self.directory

         if ".." in inner_path:
-            raise Exception(u"File not allowed: %s" % inner_path)
+            raise Exception("File not allowed: %s" % inner_path)

-        return u"%s/%s" % (self.directory, inner_path)
+        return "%s/%s" % (self.directory, inner_path)

     # Get site dir relative path
     def getInnerPath(self, path):
@@ -375,7 +375,7 @@ class SiteStorage(object):
         if path.startswith(self.directory):
             inner_path = path[len(self.directory) + 1:]
         else:
-            raise Exception(u"File not allowed: %s" % path)
+            raise Exception("File not allowed: %s" % path)
         return inner_path

     # Verify all files sha512sum using content.json
@@ -390,7 +390,7 @@ class SiteStorage(object):
             self.log.debug("VerifyFile content.json not exists")
             self.site.needFile("content.json", update=True)  # Force update to fix corrupt file
             self.site.content_manager.loadContent()  # Reload content.json
-        for content_inner_path, content in self.site.content_manager.contents.items():
+        for content_inner_path, content in list(self.site.content_manager.contents.items()):
             back["num_content"] += 1
             i += 1
             if i % 50 == 0:
@@ -400,7 +400,7 @@ class SiteStorage(object):
                 self.log.debug("[MISSING] %s" % content_inner_path)
                 bad_files.append(content_inner_path)

-            for file_relative_path in content.get("files", {}).keys():
+            for file_relative_path in list(content.get("files", {}).keys()):
                 back["num_file"] += 1
                 file_inner_path = helper.getDirname(content_inner_path) + file_relative_path  # Relative to site dir
                 file_inner_path = file_inner_path.strip("/")  # Strip leading /
@@ -418,7 +418,7 @@ class SiteStorage(object):
                 else:
                     try:
                         ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))
-                    except Exception, err:
+                    except Exception as err:
                         ok = False

                 if not ok:
@@ -430,7 +430,7 @@ class SiteStorage(object):
             # Optional files
             optional_added = 0
             optional_removed = 0
-            for file_relative_path in content.get("files_optional", {}).keys():
+            for file_relative_path in list(content.get("files_optional", {}).keys()):
                 back["num_optional"] += 1
                 file_node = content["files_optional"][file_relative_path]
                 file_inner_path = helper.getDirname(content_inner_path) + file_relative_path  # Relative to site dir
@@ -451,7 +451,7 @@ class SiteStorage(object):
                 else:
                     try:
                         ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))
-                    except Exception, err:
+                    except Exception as err:
                         ok = False

                 if ok:
@@ -475,7 +475,7 @@ class SiteStorage(object):
             )

         self.site.content_manager.contents.db.processDelayed()
-        time.sleep(0.0001)  # Context switch to avoid gevent hangs
+        time.sleep(0.001)  # Context switch to avoid gevent hangs
         return back

     # Check and try to fix site files integrity
@@ -497,15 +497,15 @@ class SiteStorage(object):
     def deleteFiles(self):
         self.log.debug("Deleting files from content.json...")
         files = []  # Get filenames
-        for content_inner_path in self.site.content_manager.contents.keys():
+        for content_inner_path in list(self.site.content_manager.contents.keys()):
             content = self.site.content_manager.contents.get(content_inner_path, {})
             files.append(content_inner_path)
             # Add normal files
-            for file_relative_path in content.get("files", {}).keys():
+            for file_relative_path in list(content.get("files", {}).keys()):
                 file_inner_path = helper.getDirname(content_inner_path) + file_relative_path  # Relative to site dir
                 files.append(file_inner_path)
             # Add optional files
-            for file_relative_path in content.get("files_optional", {}).keys():
+            for file_relative_path in list(content.get("files_optional", {}).keys()):
                 file_inner_path = helper.getDirname(content_inner_path) + file_relative_path  # Relative to site dir
                 files.append(file_inner_path)

@@ -518,7 +518,7 @@ class SiteStorage(object):
                 db_path = self.getPath(schema["db_file"])
                 if os.path.isfile(db_path):
                     os.unlink(db_path)
-            except Exception, err:
+            except Exception as err:
                 self.log.error("Db file delete error: %s" % err)

         for inner_path in files:
@@ -528,8 +528,8 @@ class SiteStorage(object):
                 try:
                     os.unlink(path)
                     break
-                except Exception, err:
-                    self.log.error(u"Error removing %s: %s, try #%s" % (inner_path, err, retry))
+                except Exception as err:
+                    self.log.error("Error removing %s: %s, try #%s" % (inner_path, err, retry))
                 time.sleep(float(retry) / 10)
             self.onUpdated(inner_path, False)

@@ -1,3 +1,3 @@
-from Site import Site
-from SiteStorage import SiteStorage
-from SiteAnnouncer import SiteAnnouncer
+from .Site import Site
+from .SiteStorage import SiteStorage
+from .SiteAnnouncer import SiteAnnouncer
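Note: Python 3 removed implicit relative imports, so inside the package `from Site import Site` is resolved as an absolute import and fails; the explicit `from .Site import Site` form is required. The sketch below builds a throwaway package at runtime to demonstrate; the demo_site name and layout are invented and only mirror the idea, not the real source tree:

    import os
    import sys
    import tempfile

    # Build a tiny package on disk to show why the explicit dot is needed on Python 3
    pkg_dir = os.path.join(tempfile.mkdtemp(), "demo_site")
    os.mkdir(pkg_dir)
    with open(os.path.join(pkg_dir, "Site.py"), "w") as f:
        f.write("class Site:\n    pass\n")
    with open(os.path.join(pkg_dir, "__init__.py"), "w") as f:
        # "from Site import Site" would raise ModuleNotFoundError here on Python 3
        f.write("from .Site import Site\n")

    sys.path.insert(0, os.path.dirname(pkg_dir))
    import demo_site
    print(demo_site.Site)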