BEGIN / END no longer necessary as there is no autocommit in new db module

This commit is contained in:
shortcutme 2019-03-16 02:18:53 +01:00
parent a46d8fe7f3
commit 4fe4d0a7e7
No known key found for this signature in database
GPG key ID: 5B63BAE6CB9613AE
6 changed files with 0 additions and 37 deletions

View file

@@ -144,9 +144,7 @@ class ChartCollector(object):
 s = time.time()
 cur = self.db.getCursor()
-cur.execute("BEGIN")
 cur.cursor.executemany("INSERT INTO data (type_id, value, date_added) VALUES (?, ?, ?)", values)
-cur.execute("END")
 cur.close()
 self.log.debug("Global collectors inserted in %.3fs" % (time.time() - s))
@@ -163,9 +161,7 @@ class ChartCollector(object):
 s = time.time()
 cur = self.db.getCursor()
-cur.execute("BEGIN")
 cur.cursor.executemany("INSERT INTO data (type_id, site_id, value, date_added) VALUES (?, ?, ?, ?)", values)
-cur.execute("END")
 cur.close()
 self.log.debug("Site collectors inserted in %.3fs" % (time.time() - s))

View file

@@ -126,7 +126,6 @@ class ContentDbPlugin(object):
 if not site_id:
 return False
 cur = self.getCursor()
-cur.execute("BEGIN")
 res = cur.execute("SELECT * FROM content WHERE size_files_optional > 0 AND site_id = %s" % site_id)
 num = 0
 for row in res.fetchall():
@@ -135,7 +134,6 @@ class ContentDbPlugin(object):
 num += self.setContentFilesOptional(site, row["inner_path"], content, cur=cur)
 except Exception as err:
 self.log.error("Error loading %s into file_optional: %s" % (row["inner_path"], err))
-cur.execute("COMMIT")
 cur.close()
 # Set my files to pinned
@@ -158,10 +156,6 @@ class ContentDbPlugin(object):
 def setContentFilesOptional(self, site, content_inner_path, content, cur=None):
 if not cur:
 cur = self
-try:
-cur.execute("BEGIN")
-except Exception as err:
-self.log.warning("Transaction begin error %s %s: %s" % (site, content_inner_path, Debug.formatException(err)))
 num = 0
 site_id = self.site_ids[site.address]
@@ -193,11 +187,6 @@ class ContentDbPlugin(object):
 self.optional_files[site_id][file_inner_path[-8:]] = 1
 num += 1
-if cur == self:
-try:
-cur.execute("END")
-except Exception as err:
-self.log.warning("Transaction end error %s %s: %s" % (site, content_inner_path, Debug.formatException(err)))
 return num
 def setContent(self, site, inner_path, content, size=0):
@@ -269,10 +258,8 @@ class ContentDbPlugin(object):
 if peer_num != row["peer"]:
 updates[row["file_id"]] = peer_num
-self.execute("BEGIN")
 for file_id, peer_num in updates.items():
 self.execute("UPDATE file_optional SET peer = ? WHERE file_id = ?", (peer_num, file_id))
-self.execute("END")
 num_updated += len(updates)
 num_file += len(peer_nums)
@@ -415,8 +402,6 @@ class ContentDbPlugin(object):
 break
 cur = self.getCursor()
-cur.execute("BEGIN")
 for file_id in deleted_file_ids:
 cur.execute("UPDATE file_optional SET is_downloaded = 0, is_pinned = 0, peer = peer - 1 WHERE ?", {"file_id": file_id})
-cur.execute("COMMIT")
 cur.close()

View file

@@ -35,7 +35,6 @@ def processRequestLog():
 content_db = ContentDbPlugin.content_db
 cur = content_db.getCursor()
 num = 0
-cur.execute("BEGIN")
 for site_id in request_log:
 for inner_path, uploaded in request_log[site_id].items():
 content_db.execute(
@@ -43,7 +42,6 @@ def processRequestLog():
 {"site_id": site_id, "inner_path": inner_path}
 )
 num += 1
-cur.execute("END")
 request_log.clear()

View file

@@ -77,7 +77,6 @@ class ContentDbPlugin(object):
 s = time.time()
 site_id = self.site_ids.get(site.address)
 cur = self.getCursor()
-cur.execute("BEGIN")
 try:
 cur.execute("DELETE FROM peer WHERE site_id = :site_id", {"site_id": site_id})
 cur.cursor.executemany(
@@ -86,8 +85,6 @@ class ContentDbPlugin(object):
 )
 except Exception as err:
 site.log.error("Save peer error: %s" % err)
-finally:
-cur.execute("END")
 site.log.debug("Peers saved in %.3fs" % (time.time() - s))
 def initSite(self, site):

View file

@@ -69,7 +69,6 @@ class FileRequestPlugin(object):
 i += 1
 hashes_changed = 0
-db.execute("BEGIN")
 for onion, onion_hashes in onion_to_hash.items():
 hashes_changed += db.peerAnnounce(
 ip_type="onion",
@@ -78,7 +77,6 @@ class FileRequestPlugin(object):
 hashes=onion_hashes,
 onion_signed=all_onions_signed
 )
-db.execute("END")
 time_db_onion = time.time() - s
 s = time.time()

View file

@@ -98,7 +98,6 @@ class Db(object):
 s = time.time()
 cur = self.getCursor()
-cur.execute("BEGIN")
 for command, params in self.delayed_queue:
 if command == "insertOrUpdate":
 cur.insertOrUpdate(*params[0], **params[1])
@@ -165,8 +164,6 @@ class Db(object):
 changed_tables = []
 cur = self.getCursor()
-cur.execute("BEGIN")
 # Check internal tables
 # Check keyvalue table
 changed = cur.needTable("keyvalue", [
@@ -221,7 +218,6 @@ class Db(object):
 except Exception as err:
 self.log.error("Error creating table %s: %s" % (table_name, Debug.formatException(err)))
-cur.execute("COMMIT")
 self.log.debug("Db check done in %.3fs, changed tables: %s" % (time.time() - s, changed_tables))
 if changed_tables:
 self.db_keyvalues = {} # Refresh table version cache
@@ -267,11 +263,7 @@ class Db(object):
 # No cursor specificed
 if not cur:
 cur = self.getCursor()
-cur.execute("BEGIN")
 cur.logging = False
-commit_after_done = True
-else:
-commit_after_done = False
 # Row for current json file if required
 if not data or [dbmap for dbmap in matched_maps if "to_keyvalue" in dbmap or "to_table" in dbmap]:
@@ -379,8 +371,6 @@ class Db(object):
 self.log.debug("Cleanup json row for %s" % file_path)
 cur.execute("DELETE FROM json WHERE json_id = %s" % json_row["json_id"])
-if commit_after_done:
-cur.execute("COMMIT")
 return True
@@ -394,7 +384,6 @@ if __name__ == "__main__":
 dbjson.collect_stats = True
 dbjson.checkTables()
 cur = dbjson.getCursor()
-cur.execute("BEGIN")
 cur.logging = False
 dbjson.updateJson("data/users/content.json", cur=cur)
 for user_dir in os.listdir("data/users"):