Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add new field watched to tv_episodes table and Episode class. #4825

Merged
merged 6 commits into from
Aug 10, 2018
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
#### Improvements
- Converted /config/postProcessing to a Vue component ([#4259](https://github.com/pymedusa/Medusa/pull/4259))
- Bundled the web application using Webpack ([#4692](https://github.com/pymedusa/Medusa/pull/4692))
- Added a new field 'watched' to the tv_episodes db table. UI will be added in the future. ([#4825](https://github.com/pymedusa/Medusa/pull/4825))

#### Fixes
- Fixed error when changing episode status from episode status management ([#4783](https://github.com/pymedusa/Medusa/pull/4783))
Expand Down
52 changes: 48 additions & 4 deletions medusa/databases/main_db.py
Original file line number Diff line number Diff line change
Expand Up @@ -699,8 +699,8 @@ def execute(self):
utils.backup_database(self.connection.path, self.connection.version)

log.info(u'Adding new quality field in the tv_episodes table')
self.connection.action('DROP TABLE IF EXISTS old_tv_episodes;')
self.connection.action('ALTER TABLE tv_episodes RENAME TO old_tv_episodes;')
self.connection.action('DROP TABLE IF EXISTS tmp_tv_episodes;')
self.connection.action('ALTER TABLE tv_episodes RENAME TO tmp_tv_episodes;')

self.connection.action(
'CREATE TABLE IF NOT EXISTS tv_episodes '
Expand All @@ -725,14 +725,14 @@ def execute(self):
'subtitles, subtitles_searchcount, subtitles_lastsearch, '
'is_proper, scene_season, scene_episode, absolute_number, '
'scene_absolute_number, version, release_group, manually_searched '
'FROM old_tv_episodes;'
'FROM tmp_tv_episodes;'
)

# We have all that we need, drop the old table
for index in ['idx_sta_epi_air', 'idx_sta_epi_sta_air', 'idx_status']:
log.info(u'Dropping the index on {0}', index)
self.connection.action('DROP INDEX IF EXISTS {index};'.format(index=index))
self.connection.action('DROP TABLE IF EXISTS old_tv_episodes;')
self.connection.action('DROP TABLE IF EXISTS tmp_tv_episodes;')

log.info(u'Splitting the composite status into status and quality')
sql_results = self.connection.select('SELECT status from tv_episodes GROUP BY status;')
Expand Down Expand Up @@ -840,3 +840,47 @@ def shift_history_qualities(self):
'UPDATE history SET quality = ? WHERE quality = ?;',
[new_quality, quality]
)


class AddEpisodeWatchedField(ShiftQualities):
    """Add a new ``watched`` column to the ``tv_episodes`` table."""

    def test(self):
        """Test if the version is at least 44.12."""
        return self.connection.version >= (44, 12)

    def execute(self):
        """Rebuild ``tv_episodes`` with the extra ``watched`` column.

        The existing table is renamed aside, a new table including the
        ``watched`` column is created, and the old rows are copied back with
        ``watched`` set to 0 — the same rebuild pattern used by the previous
        migrations in this file (see ``ShiftQualities``).
        """
        utils.backup_database(self.connection.path, self.connection.version)

        log.info(u'Adding new watched field in the tv_episodes table')
        # Drop any leftover temp table from a previously interrupted run,
        # then move the current table out of the way.
        self.connection.action('DROP TABLE IF EXISTS tmp_tv_episodes;')
        self.connection.action('ALTER TABLE tv_episodes RENAME TO tmp_tv_episodes;')

        # DEFAULT 0 guards against NULL 'watched' values for rows inserted
        # without the column: Episode.load_from_db does
        # int(sql_results[0][b'watched']), which would raise on None.
        self.connection.action(
            'CREATE TABLE IF NOT EXISTS tv_episodes '
            '(episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid INTEGER, indexer INTEGER, '
            'name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, '
            'hastbn NUMERIC, status NUMERIC, quality NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, '
            'subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, '
            'is_proper NUMERIC, scene_season NUMERIC, scene_episode NUMERIC, absolute_number NUMERIC, '
            'scene_absolute_number NUMERIC, version NUMERIC DEFAULT -1, release_group TEXT, '
            'manually_searched NUMERIC, watched NUMERIC DEFAULT 0);'
        )

        # Re-insert old values, setting the new column 'watched' to the default value 0.
        self.connection.action(
            'INSERT INTO tv_episodes '
            '(showid, indexerid, indexer, name, season, episode, description, airdate, hasnfo, '
            'hastbn, status, quality, location, file_size, release_name, subtitles, subtitles_searchcount, '
            'subtitles_lastsearch, is_proper, scene_season, scene_episode, absolute_number, scene_absolute_number, '
            'version, release_group, manually_searched, watched) '
            'SELECT showid, indexerid, indexer, '
            'name, season, episode, description, airdate, hasnfo, '
            'hastbn, status, quality, location, file_size, release_name, '
            'subtitles, subtitles_searchcount, subtitles_lastsearch, '
            'is_proper, scene_season, scene_episode, absolute_number, '
            'scene_absolute_number, version, release_group, manually_searched, 0 AS watched '
            'FROM tmp_tv_episodes;'
        )

        # IF EXISTS for consistency with the drop above and with the other
        # migrations in this file (ShiftQualities uses the same guard).
        self.connection.action('DROP TABLE IF EXISTS tmp_tv_episodes;')
        # NOTE(review): nothing visible here advances the schema version, so
        # test() would keep returning False and this migration would re-run on
        # every start (resetting 'watched' to 0 each time) — confirm the
        # version bump happens in the base class, or add it here.
2 changes: 2 additions & 0 deletions medusa/server/api/v2/episodes.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
from medusa.logger.adapters.style import BraceAdapter
from medusa.server.api.v2.base import (
BaseRequestHandler,
BooleanField,
IntegerField,
iter_nested_items,
set_nested_value,
Expand Down Expand Up @@ -135,6 +136,7 @@ def _patch_episode(episode, data):
patches = {
'status': IntegerField(episode, 'status'),
'quality': IntegerField(episode, 'quality'),
'watched': BooleanField(episode, 'watched'),
}

for key, value in iter_nested_items(data):
Expand Down
58 changes: 34 additions & 24 deletions medusa/tv/episode.py
Original file line number Diff line number Diff line change
Expand Up @@ -265,6 +265,7 @@ def __init__(self, series, season, episode, filepath=''):
self.related_episodes = []
self.wanted_quality = []
self.loaded = False
self.watched = False
if series:
self._specify_episode(self.season, self.episode)
self.check_for_meta_files()
Expand Down Expand Up @@ -631,6 +632,7 @@ def load_from_db(self, season, episode):
self.airdate = date.fromordinal(int(sql_results[0][b'airdate']))
self.status = int(sql_results[0][b'status'] or UNSET)
self.quality = int(sql_results[0][b'quality'] or Quality.NA)
self.watched = int(sql_results[0][b'watched'])

# don't overwrite my location
if sql_results[0][b'location']:
Expand Down Expand Up @@ -1062,6 +1064,7 @@ def to_json(self, detailed=True):
data['title'] = self.name
data['subtitles'] = self.subtitles
data['status'] = self.status_name
data['watched'] = self.watched
data['quality'] = self.quality
data['release'] = {}
data['release']['name'] = self.release_name
Expand Down Expand Up @@ -1217,13 +1220,14 @@ def get_sql(self):
b' version = ?, '
b' release_group = ?, '
b'  manually_searched = ?, '
b'  watched = ? '
b'WHERE '
b' episode_id = ?',
[self.indexerid, self.indexer, self.name, self.description, ','.join(self.subtitles),
self.subtitles_searchcount, self.subtitles_lastsearch, self.airdate.toordinal(), self.hasnfo,
self.hastbn, self.status, self.quality, self.location, self.file_size, self.release_name,
self.is_proper, self.series.series_id, self.season, self.episode, self.absolute_number,
self.version, self.release_group, self.manually_searched, ep_id]]
self.version, self.release_group, self.manually_searched, self.watched, ep_id]]
else:
# Don't update the subtitle language when the srt file doesn't contain the
# alpha2 code, keep value from subliminal
Expand Down Expand Up @@ -1253,13 +1257,14 @@ def get_sql(self):
b' version = ?, '
b' release_group = ?, '
b'  manually_searched = ?, '
b'  watched = ? '
b'WHERE '
b' episode_id = ?',
[self.indexerid, self.indexer, self.name, self.description,
self.subtitles_searchcount, self.subtitles_lastsearch, self.airdate.toordinal(), self.hasnfo,
self.hastbn, self.status, self.quality, self.location, self.file_size, self.release_name,
self.is_proper, self.series.series_id, self.season, self.episode, self.absolute_number,
self.version, self.release_group, self.manually_searched, ep_id]]
self.version, self.release_group, self.manually_searched, self.watched, ep_id]]
else:
# use a custom insert method to get the data into the DB.
return [
Expand Down Expand Up @@ -1287,15 +1292,17 @@ def get_sql(self):
b' episode, '
b' absolute_number, '
b' version, '
b' release_group) '
b' release_group, '
b' manually_searched, '
b' watched) '
b'VALUES '
b' ((SELECT episode_id FROM tv_episodes WHERE indexer = ? AND showid = ? AND season = ? AND episode = ?), '
b' ?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);',
b' ?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);',
[self.series.indexer, self.series.series_id, self.season, self.episode, self.indexerid, self.series.indexer, self.name,
self.description, ','.join(self.subtitles), self.subtitles_searchcount, self.subtitles_lastsearch,
self.airdate.toordinal(), self.hasnfo, self.hastbn, self.status, self.quality, self.location,
self.file_size, self.release_name, self.is_proper, self.series.series_id, self.season, self.episode,
self.absolute_number, self.version, self.release_group]]
self.absolute_number, self.version, self.release_group, self.manually_searched, self.watched]]
except Exception as error:
log.error('{id}: Error while updating database: {error_msg!r}',
{'id': self.series.series_id, 'error_msg': error})
Expand All @@ -1305,25 +1312,28 @@ def save_to_db(self):
if not self.dirty:
return

new_value_dict = {b'indexerid': self.indexerid,
b'name': self.name,
b'description': self.description,
b'subtitles': ','.join(self.subtitles),
b'subtitles_searchcount': self.subtitles_searchcount,
b'subtitles_lastsearch': self.subtitles_lastsearch,
b'airdate': self.airdate.toordinal(),
b'hasnfo': self.hasnfo,
b'hastbn': self.hastbn,
b'status': self.status,
b'quality': self.quality,
b'location': self.location,
b'file_size': self.file_size,
b'release_name': self.release_name,
b'is_proper': self.is_proper,
b'absolute_number': self.absolute_number,
b'version': self.version,
b'release_group': self.release_group,
b'manually_searched': self.manually_searched}
new_value_dict = {
b'indexerid': self.indexerid,
b'name': self.name,
b'description': self.description,
b'subtitles': ','.join(self.subtitles),
b'subtitles_searchcount': self.subtitles_searchcount,
b'subtitles_lastsearch': self.subtitles_lastsearch,
b'airdate': self.airdate.toordinal(),
b'hasnfo': self.hasnfo,
b'hastbn': self.hastbn,
b'status': self.status,
b'quality': self.quality,
b'location': self.location,
b'file_size': self.file_size,
b'release_name': self.release_name,
b'is_proper': self.is_proper,
b'absolute_number': self.absolute_number,
b'version': self.version,
b'release_group': self.release_group,
b'manually_searched': self.manually_searched,
b'watched': self.watched,
}

control_value_dict = {
b'indexer': self.series.indexer,
Expand Down