
1080p migration tweak #639

Merged
merged 2 commits

3 participants

@thezoggy
  • Add some logging to each 1080p migration part so people see some sort of progress in their console
  • Use show_id instead of tvdb_id for the first part of the migration, just in case there is a duplicate id or a show is missing its tvdbid (never know)
  • Optimize the queries by reducing the arithmetic performed each time and queuing the list of updates to perform using a transaction (thanks @Prinz23; see the sketch right after this list)
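For reference, the queued-transaction pattern this refers to looks roughly like this (a minimal sketch lifted from the mainDB.py diff further down; connection.select, mass_action and _update_status are the names used in this pull):

    # queue every UPDATE, then hand the whole list to mass_action so it runs as one transaction
    ql = []
    episodes = self.connection.select("SELECT * FROM tv_episodes WHERE status < 3276800 AND status >= 800")
    for cur_episode in episodes:
        ql.append(["UPDATE tv_episodes SET status = ? WHERE episode_id = ?",
                   [self._update_status(cur_episode["status"]), cur_episode["episode_id"]]])
    self.connection.mass_action(ql)  # one commit instead of one commit per row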
@Prinz23

BEGIN/END TRANSACTION won't work correctly with Python's sqlite3 module.

I use a mass_action function (added to db.py) in my timezone/coming-episodes pull request to fill the network table with all the info in a second or so...

@Prinz23

My mass_action function does nothing more than use a transaction to execute any number of SQL queries.

I just updated the timezone pull request with a fix for the function.

It's easy to use, and converting old calls to the action function is straightforward: you just call this function with a Python list of all the SQL queries and it executes them as one transaction. It's really fast in comparison.
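Since Python's sqlite3 module handles the transaction itself, mass_action just batches the execute() calls and finishes with a single commit(). A minimal usage sketch (the 'updates' list here is hypothetical; mass_action is the helper added to db.py in this pull):

    myDB = db.DBConnection()
    ql = []
    for show_id, quality in updates:  # 'updates' = hypothetical list of (show_id, quality) pairs
        ql.append(["UPDATE tv_shows SET quality = ? WHERE show_id = ?", [quality, show_id]])
    myDB.mass_action(ql)  # every query runs in one transaction, with a single commit at the end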

@thezoggy

Your mass_action function did exactly what I wanted and was definitely a speed improvement. I've now included it as part of this pull, only using it for just this 1080p migration for right now.

@thezoggy Add some logging to each 1080p migration part so people see some sort of progress in their console

Use show_id instead of tvdb_id for first part of migration, just in case there is a duplicate id or if show is missing tvdbid (never know)
Optimize query by reducing the arithmetic performed each time and queuing the list of updates to perform using a transaction (thanks @Prinz23)
c3b8528
@Prinz23

I improved the speed of mass_action a bit for systems with low disk I/O performance.

@thezoggy Tweaked mass_action function so that we can log the individual queries if we wanted; by default we only log the amount of queries when completed. Should help with low-powered devices, where doing the work and logging each query at the same time would cause some thrashing on the HDD.
91c428a
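In other words, per-query logging is now opt-in; by default only the query count is logged once the transaction finishes (a small sketch of the two call styles; myDB stands for a db.DBConnection instance):

    myDB.mass_action(ql)                       # default: a single DEBUG line with the query count at the end
    myDB.mass_action(ql, logTransaction=True)  # verbose: also logs each statement (and its args) at DEBUG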
@midgetspy merged commit fb37d33
Commits on Mar 28, 2013
  1. @thezoggy authored c3b8528

    Add some logging to each 1080p migration part so people see some sort of progress in their console

    Use show_id instead of tvdb_id for first part of migration, just in case there is a duplicate id or if show is missing tvdbid (never know)
    Optimize query by reducing the arithmetic performed each time and queuing the list of updates to perform using a transaction (thanks @Prinz23)

Commits on Mar 29, 2013
  1. @thezoggy authored 91c428a

    Tweaked mass_action function so that we can log the individual queries if we wanted; by default we only log the amount of queries when completed. Should help with low-powered devices, where doing the work and logging each query at the same time would cause some thrashing on the HDD.
Showing with 69 additions and 8 deletions.
  1. +24 −7 sickbeard/databases/mainDB.py
  2. +45 −1 sickbeard/db.py
sickbeard/databases/mainDB.py (31 changed lines)
@@ -27,6 +27,7 @@
 
 MAX_DB_VERSION = 12
 
+
 class MainSanityCheck(db.DBSanityCheck):
     def check(self):
         self.fix_duplicate_shows()
@@ -503,7 +504,7 @@ def execute(self):
             ep_file_name = ek.ek(os.path.basename, cur_result["location"])
             ep_file_name = os.path.splitext(ep_file_name)[0]
 
-            # I only want to find real scene names here so anything with a space in it is out
+            # only want to find real scene names here so anything with a space in it is out
             if ' ' in ep_file_name:
                 continue
 
@@ -621,6 +622,8 @@ def execute(self):
         new_any = common.Quality.combineQualities([common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.FULLHDTV, common.Quality.HDWEBDL, common.Quality.FULLHDWEBDL, common.Quality.HDBLURAY, common.Quality.FULLHDBLURAY, common.Quality.UNKNOWN], [])
 
         # update qualities (including templates)
+        logger.log(u"[1/4] Updating pre-defined templates and the quality for each show...", logger.MESSAGE)
+        ql = []
         shows = self.connection.select("SELECT * FROM tv_shows")
         for cur_show in shows:
             if cur_show["quality"] == old_hd:
@@ -629,23 +632,37 @@ def execute(self):
                 new_quality = new_any
             else:
                 new_quality = self._update_composite_qualities(cur_show["quality"])
-            self.connection.action("UPDATE tv_shows SET quality = ? WHERE tvdb_id = ?", [new_quality, cur_show["tvdb_id"]])
+            ql.append(["UPDATE tv_shows SET quality = ? WHERE show_id = ?", [new_quality, cur_show["show_id"]]])
+        self.connection.mass_action(ql)
 
         # update status that are are within the old hdwebdl (1<<3 which is 8) and better -- exclude unknown (1<<15 which is 32768)
-        episodes = self.connection.select("SELECT * FROM tv_episodes WHERE status/100 < 32768 AND status/100 >= 8")
+        logger.log(u"[2/4] Updating the status for the episodes within each show...", logger.MESSAGE)
+        ql = []
+        episodes = self.connection.select("SELECT * FROM tv_episodes WHERE status < 3276800 AND status >= 800")
         for cur_episode in episodes:
-            self.connection.action("UPDATE tv_episodes SET status = ? WHERE episode_id = ?", [self._update_status(cur_episode["status"]), cur_episode["episode_id"]])
+            ql.append(["UPDATE tv_episodes SET status = ? WHERE episode_id = ?", [self._update_status(cur_episode["status"]), cur_episode["episode_id"]]])
+        self.connection.mass_action(ql)
 
         # make two seperate passes through the history since snatched and downloaded (action & quality) may not always coordinate together
 
         # update previous history so it shows the correct action
-        historyAction = self.connection.select("SELECT * FROM history WHERE action/100 < 32768 AND action/100 >= 8")
+        logger.log(u"[3/4] Updating history to reflect the correct action...", logger.MESSAGE)
+        ql = []
+        historyAction = self.connection.select("SELECT * FROM history WHERE action < 3276800 AND action >= 800")
         for cur_entry in historyAction:
-            self.connection.action("UPDATE history SET action = ? WHERE showid = ? AND date = ?", [self._update_status(cur_entry["action"]), cur_entry["showid"], cur_entry["date"]])
+            ql.append(["UPDATE history SET action = ? WHERE showid = ? AND date = ?", [self._update_status(cur_entry["action"]), cur_entry["showid"], cur_entry["date"]]])
+        self.connection.mass_action(ql)
 
         # update previous history so it shows the correct quality
+        logger.log(u"[4/4] Updating history to reflect the correct quality...", logger.MESSAGE)
+        ql = []
         historyQuality = self.connection.select("SELECT * FROM history WHERE quality < 32768 AND quality >= 8")
         for cur_entry in historyQuality:
-            self.connection.action("UPDATE history SET quality = ? WHERE showid = ? AND date = ?", [self._update_quality(cur_entry["quality"]), cur_entry["showid"], cur_entry["date"]])
+            ql.append(["UPDATE history SET quality = ? WHERE showid = ? AND date = ?", [self._update_quality(cur_entry["quality"]), cur_entry["showid"], cur_entry["date"]]])
+        self.connection.mass_action(ql)
 
         self.incDBVersion()
+
+        # cleanup and reduce db if any previous data was removed
+        logger.log(u"Performing a vacuum on the database.", logger.DEBUG)
+        self.connection.action("VACUUM")
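A note on the rewritten WHERE clauses above: the composite status/action values carry the quality in the factor-of-100 part, which is why the old queries divided by 100. Pre-multiplying the bounds avoids doing that division on every row, and for non-negative integers the two forms select exactly the same rows:

    # with integer division and status >= 0:
    #   status/100 >= 8      <=>  status >= 8 * 100      <=>  status >= 800
    #   status/100 < 32768   <=>  status < 32768 * 100   <=>  status < 3276800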
sickbeard/db.py (46 changed lines)
@@ -16,7 +16,7 @@
 # You should have received a copy of the GNU General Public License
 # along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
 
-from __future__ import with_statement
+from __future__ import with_statement
 
 import os.path
 import re
@@ -66,6 +66,50 @@ def checkDBVersion(self):
         else:
             return 0
 
+    def mass_action(self, querylist, logTransaction=False):
+
+        with db_lock:
+
+            if querylist == None:
+                return
+
+            sqlResult = []
+            attempt = 0
+
+            while attempt < 5:
+                try:
+                    for qu in querylist:
+                        if len(qu) == 1:
+                            if logTransaction:
+                                logger.log(qu[0], logger.DEBUG)
+                            sqlResult.append(self.connection.execute(qu[0]))
+                        elif len(qu) > 1:
+                            if logTransaction:
+                                logger.log(qu[0] + " with args " + str(qu[1]), logger.DEBUG)
+                            sqlResult.append(self.connection.execute(qu[0], qu[1]))
+                    self.connection.commit()
+                    logger.log(u"Transaction with " + str(len(querylist)) + u" query's executed", logger.DEBUG)
+                    return sqlResult
+                except sqlite3.OperationalError, e:
+                    sqlResult = []
+                    if self.connection:
+                        self.connection.rollback()
+                    if "unable to open database file" in e.message or "database is locked" in e.message:
+                        logger.log(u"DB error: " + ex(e), logger.WARNING)
+                        attempt += 1
+                        time.sleep(1)
+                    else:
+                        logger.log(u"DB error: " + ex(e), logger.ERROR)
+                        raise
+                except sqlite3.DatabaseError, e:
+                    sqlResult = []
+                    if self.connection:
+                        self.connection.rollback()
+                    logger.log(u"Fatal error executing query: " + ex(e), logger.ERROR)
+                    raise
+
+            return sqlResult
+
     def action(self, query, args=None):
 
         with db_lock:
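For what it's worth, the retry loop above appears to mirror the existing action() helper: on "database is locked" or "unable to open database file" the transaction is rolled back and retried (up to five times, one second apart), while any other sqlite3 error is rolled back and re-raised, so a caller never sees a partially applied batch. A small sketch of how a caller might treat that, reusing the myDB/ql names from the sketches above (the log message is hypothetical):

    try:
        myDB.mass_action(ql)
    except sqlite3.DatabaseError:
        # mass_action already rolled the transaction back; nothing was partially applied
        logger.log(u"Mass update failed, nothing was changed", logger.ERROR)
        raise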