Skip to content

Commit

Permalink
Bulk minor cleanup/bugfixes
Browse files Browse the repository at this point in the history
Mainly adding/adjusting @type annotations, but also addresses several
actual bugs.

Non-exhaustive list of minor changes:
* Ensure we get a string from readFileSync instead of a Buffer by
  passing in an explicit encoding
* Fix MarkerCacheManager's #tryUpdateCache, whose db callback never
  actually executes because DatabaseWrapper doesn't use it.
* Several fixes around modified_date's potential null value
* Ensure 'final' is always a number outside of the client-facing isFinal
* Fix a call to PlexQueryManager.getEpisodesFromList that passed in an
  array instead of a Set. Also correctly check that set's 'size' and not
  its 'length'
* shiftMarkers didn't correctly calculate a backup section_id
* Several ServerErrors didn't pass in an error code
* Ensure we set a version even if we fail to read package.json
* Use MarkerBreakdown.deltaFromType in QueryCommands.allStats instead of
  ~copying its implementation.
* Create typedef for BulkDelete return value
* Move MarkerBreakdownMap definition to MarkerBreakdown.js
* More specific DatabaseWrapper parameter typedefs
* Typedefs for config.json structure
* Be more explicit about string -> number conversion in several places
  (and remove some superfluous number -> number conversions)
* Replace some "store true" dictionaries with Sets
* Move LegacyMarkerBreakdown.Clear() out of ServerCommands, calling it
  directly from cleanupForShutdown
* Remove unused PlexQueryManager.getMovie
* typedefs for RawShow/SeasonData
* Remove unnecessary try/catch from a couple PlexQueryManager methods
* Remove startMs.toString() workaround that is no longer necessary (if
  it ever was)
* Account for failed contentType detection in sendCompressedData
* Pass MediaItemCache through to ThumbnailManager methods to avoid
  retrieving it multiple times unnecessarily
  • Loading branch information
danrahn committed Oct 21, 2023
1 parent 152bd5b commit 6a85e8a
Show file tree
Hide file tree
Showing 23 changed files with 409 additions and 254 deletions.
6 changes: 4 additions & 2 deletions Client/Script/Common.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ import { BulkMarkerResolveType } from '../../Shared/PlexTypes.js';
import { MarkerEnum } from '../../Shared/MarkerType.js';
import ServerPausedOverlay from './ServerPausedOverlay.js';

/** @typedef {!import('../../Shared/PlexTypes').BulkDeleteResult} BulkDeleteResult */
/** @typedef {!import('../../Shared/PlexTypes').BulkRestoreResponse} BulkRestoreResponse */
/** @typedef {!import('../../Shared/PlexTypes').ChapterMap} ChapterMap */
/** @typedef {!import('../../Shared/PlexTypes').CustomBulkAddMap} CustomBulkAddMap */
Expand All @@ -17,6 +18,7 @@ import ServerPausedOverlay from './ServerPausedOverlay.js';
/** @typedef {!import('../../Shared/PlexTypes').SerializedSeasonData} SerializedSeasonData */
/** @typedef {!import('../../Shared/PlexTypes').SerializedShowData} SerializedShowData */
/** @typedef {!import('../../Shared/PlexTypes').ShiftResult} ShiftResult */
/** @typedef {!import('../../Shared/MarkerBreakdown').MarkerBreakdownMap} MarkerBreakdownMap */

const Log = new ContextualLog('ClientCommon');

Expand Down Expand Up @@ -98,15 +100,15 @@ const ServerCommand = {
/**
* Query for all markers that would be deleted for the given metadata id.
* @param {number} id
* @returns {Promise<{markers: SerializedMarkerData[], episodeData?: SerializedEpisodeData[]}>} */
* @returns {Promise<BulkDeleteResult>} */
checkBulkDelete : async (id) => jsonRequest('bulk_delete', { id : id, dryRun : 1, applyTo : MarkerEnum.All, ignored : [] }),

/**
* Delete all markers associated with the given media item, except those specified in `ignored`.
* @param {number} id
* @param {number} applyTo The marker type(s) to apply the delete to.
* @param {number[]} [ignored =[]] List of marker ids to not delete.
* @returns {Promise<{markers: SerializedMarkerData[], deletedMarkers: SerializedMarkerData[]}>} */
* @returns {Promise<BulkDeleteResult>} */
bulkDelete : async (id, applyTo, ignored=[]) => jsonRequest('bulk_delete', { id : id, dryRun : 0, applyTo : applyTo, ignored : ignored.join(',') }),

/**
Expand Down
2 changes: 1 addition & 1 deletion Client/Script/FilterDialog.js
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ import { PlexUI } from './PlexUI.js';
import { SectionType } from '../../Shared/PlexTypes.js';
import ThemeColors from './ThemeColors.js';

/** @typedef {!import('../../Shared/PlexTypes').MarkerBreakdownMap} MarkerBreakdownMap */
/** @typedef {!import('../../Shared/MarkerBreakdown').MarkerBreakdownMap} MarkerBreakdownMap */
/** @typedef {!import('../../Shared/PlexTypes').MarkerData} MarkerData */

const Log = new ContextualLog('SortFilter');
Expand Down
25 changes: 15 additions & 10 deletions Server/Commands/CoreCommands.js
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,9 @@ import { MarkerCache } from '../MarkerCacheManager.js';
import ServerError from '../ServerError.js';

/** @typedef {!import('../../Shared/PlexTypes').BulkAddResult} BulkAddResult */
/** @typedef {!import('../../Shared/PlexTypes').BulkDeleteResult} BulkDeleteResult */
/** @typedef {!import('../../Shared/PlexTypes').CustomBulkAddMap} CustomBulkAddMap */
/** @typedef {!import('../../Shared/PlexTypes').OldMarkerTimings} OldMarkerTimings */
/** @typedef {!import('../../Shared/PlexTypes').SerializedEpisodeData} SerializedEpisodeData */
/** @typedef {!import('../../Shared/PlexTypes').SerializedMarkerData} SerializedMarkerData */
/** @typedef {!import('../../Shared/PlexTypes').ShiftResult} ShiftResult */
Expand Down Expand Up @@ -222,15 +224,16 @@ class CoreCommands {

// Now make sure all indexes are in order
const reindexResult = await PlexQueries.reindex(metadataId);
/** @type {Record<number, RawMarkerData>} */
const reindexMap = {};
for (const marker of reindexResult.markers) {
reindexMap[marker.id] = marker;
}

const markerData = [];
/** @type {{[markerId: number]: RawMarkerData}} */
/** @type {OldMarkerTimings} */
const oldMarkerMap = {};
markerInfo.markers.forEach(m => oldMarkerMap[m.id] = m);
markerInfo.markers.forEach(m => oldMarkerMap[m.id] = { start : m.start, end : m.end });
for (const marker of shifted) {
if (reindexMap[marker.id]) {
marker.index = reindexMap[marker.id].index; // TODO: indexRemove: remove
Expand All @@ -257,11 +260,7 @@ class CoreCommands {
* @param {boolean} dryRun Whether we should just gather data about what we would delete.
* @param {number} applyTo The type of marker(s) to delete.
* @param {number[]} ignoredMarkerIds List of marker ids to not delete.
* @returns {Promise<{
* markers: SerializedMarkerData,
* deletedMarkers: SerializedMarkerData[],
* episodeData?: SerializedEpisodeData[]}>}
*/
* @returns {Promise<BulkDeleteResult>} */
static async bulkDelete(metadataId, dryRun, applyTo, ignoredMarkerIds) {
const markerInfo = await PlexQueries.getMarkersAuto(metadataId);
if (markerInfo.typeInfo.metadata_type == MetadataType.Movie) {
Expand Down Expand Up @@ -290,12 +289,13 @@ class CoreCommands {
if (dryRun) {
// All we really do for a dry run is grab all markers for the given metadata item,
// and associated episode data for the customization table

/** @type {SerializedMarkerData[]} */
const serializedMarkers = [];
for (const marker of markerInfo.markers) {
serializedMarkers.push(new MarkerData(marker));
}

/** @type {{ [episodeId: number]: EpisodeData }} */
const serializedEpisodeData = {};
const rawEpisodeData = await PlexQueries.getEpisodesFromList(episodeIds, metadataId);
rawEpisodeData.forEach(e => serializedEpisodeData[e.id] = new EpisodeData(e));
Expand All @@ -317,8 +317,10 @@ class CoreCommands {
newMarkerInfo.markers.length == ignoredMarkerIds.length,
`BulkDelete - expected new marker count to equal ignoredMarkerIds count. What went wrong?`);

/** @type {MarkerData[]} */
const serializedMarkers = [];
newMarkerInfo.markers.forEach(m => serializedMarkers.push(new MarkerData(m)));
/** @type {MarkerData[]} */
const deleted = [];
for (const deletedMarker of toDelete) {
const nonRaw = new MarkerData(deletedMarker);
Expand Down Expand Up @@ -346,7 +348,7 @@ class CoreCommands {
* @returns {Promise<BulkAddResult>>} */
static async bulkAdd(markerType, metadataId, start, end, resolveType, ignored=[]) {
if (resolveType != BulkMarkerResolveType.DryRun && (start < 0 || end <= start)) {
throw new ServerError(`Start cannot be negative or greater than end, found (start: ${start} end: ${end})`);
throw new ServerError(`Start cannot be negative or greater than end, found (start: ${start} end: ${end})`, 500);
}

if (Object.values(MarkerType).indexOf(markerType) === -1) {
Expand Down Expand Up @@ -412,6 +414,7 @@ class CoreCommands {

/** @type {{[episodeId: number]: RawMarkerData[]}} */
const markerCounts = {};
/** @type {OldMarkerTimings} */
const oldMarkerTimings = {};
for (const marker of previousMarkers) {
markerCounts[marker.parent_id] ??= 0;
Expand Down Expand Up @@ -454,6 +457,7 @@ class CoreCommands {
}

try {
/** @type {{ [key: string]: any }} */
const parsed = JSON.parse(markers);
for (const [key, value] of Object.entries(parsed)) {
if (isNaN(parseInt(key))) {
Expand All @@ -467,7 +471,7 @@ class CoreCommands {
}

if (start < 0 || end <= start) {
throw new ServerError(`Invalid marker timestamp data given [start=${start}, end=${end}]`);
throw new ServerError(`Invalid marker timestamp data given [start=${start}, end=${end}]`, 400);
}
}

Expand All @@ -483,6 +487,7 @@ class CoreCommands {
* @param {number} startShift
* @param {number} endShift */
static #checkOverflow(seen, rawEpisodeData, startShift, endShift) {
/** @type {{ [baseId: number]: number }} */
const limits = {};
for (const episode of rawEpisodeData) {
limits[episode.id] = episode.duration;
Expand Down
4 changes: 3 additions & 1 deletion Server/Commands/PurgeCommands.js
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ import { MarkerCache } from '../MarkerCacheManager.js';
import { PlexQueries } from '../PlexQueryManager.js';
import ServerError from '../ServerError.js';

/** @typedef {!import('../../Shared/PlexTypes').BulkRestoreResponse} BulkRestoreResponse */
/** @typedef {!import('../../Shared/PlexTypes').MarkerDataMap} MarkerDataMap */


Expand Down Expand Up @@ -34,7 +35,8 @@ class PurgeCommands {
* Attempts to restore the last known state of the markers with the given ids.
* @param {number[]} oldMarkerIds
* @param {number} sectionId
* @param {number} resolveType */
* @param {number} resolveType
* @returns {BulkRestoreResponse} */
static async restoreMarkers(oldMarkerIds, sectionId, resolveType) {
// TODO: Why does bulk overwrite keep the old markers around?
if (Object.keys(MarkerConflictResolution).filter(k => MarkerConflictResolution[k] == resolveType).length == 0) {
Expand Down
36 changes: 21 additions & 15 deletions Server/Commands/QueryCommands.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import { EpisodeData, MarkerData, MovieData, SeasonData, SectionType, ShowData } from '../../Shared/PlexTypes.js';
import { MarkerType, supportedMarkerType } from '../../Shared/MarkerType.js';
import { ContextualLog } from '../../Shared/ConsoleLog.js';
import MarkerBreakdown from '../../Shared/MarkerBreakdown.js';
import { supportedMarkerType } from '../../Shared/MarkerType.js';

import { Config } from '../MarkerEditorConfig.js';
import LegacyMarkerBreakdown from '../LegacyMarkerBreakdown.js';
Expand All @@ -10,6 +11,7 @@ import ServerError from '../ServerError.js';
import { Thumbnails } from '../ThumbnailManager.js';

/** @typedef {!import('../../Shared/PlexTypes').LibrarySection} LibrarySection */
/** @typedef {!import('../MarkerCacheManager').TreeStats} TreeStats */


const Log = new ContextualLog('QueryCommands');
Expand Down Expand Up @@ -133,7 +135,7 @@ class QueryCommands {
static async getEpisodes(metadataId) {
const rows = await PlexQueries.getEpisodes(metadataId);

/** @type {EpisodeData[]} */
/** @type {Promise<EpisodeData>[]} */
const promises = [];
const useThumbnails = Config.useThumbnails();
rows.forEach(rawEpisode => {
Expand Down Expand Up @@ -169,7 +171,8 @@ class QueryCommands {
/**
* Gather marker information for all episodes/movies in the given library,
* returning the number of items that have X markers associated with it.
* @param {number} sectionId The library section id to parse. */
* @param {number} sectionId The library section id to parse.
* @returns {Promise<Record<number, number>>} */
static async allStats(sectionId) {
// If we have global marker data, forego the specialized markerBreakdownCache
// and build the statistics using the cache manager.
Expand All @@ -182,6 +185,8 @@ class QueryCommands {
}

// Something went wrong with our global cache. Fall back to markerBreakdownCache.
Log.warn(`Extended marker stats are enabled, but we couldn't find cached section data. ` +
`Falling back to the legacy marker cache.`);
}

if (LegacyMarkerBreakdown.Cache[sectionId]) {
Expand All @@ -191,29 +196,26 @@ class QueryCommands {

const rows = await PlexQueries.markerStatsForSection(sectionId);

/** @type {Record<number, number>} */
const buckets = {};
Log.verbose(`Parsing ${rows.length} tags`);
let idCur = rows.length > 0 ? rows[0].parent_id : -1;
let countCur = 0;
// See MarkerBreakdown.js
const bucketDelta = (markerType) => markerType == MarkerType.Intro ? 1 : (1 << 16);

for (const row of rows) {
// TODO: better handing of non intros/credits (i.e. commercials)
if (!supportedMarkerType(row.marker_type)) {
continue;
}

const isMarker = row.tag_id == PlexQueries.markerTagId();
if (row.parent_id == idCur) {
if (row.tag_id == PlexQueries.markerTagId()) {
// See MarkerBreakdown.js
countCur += bucketDelta(row.marker_type);
}
countCur += isMarker ? MarkerBreakdown.deltaFromType(1, row.marker_type) : 0;
} else {
buckets[countCur] ??= 0;
++buckets[countCur];
idCur = row.parent_id;
countCur = row.tag_id == PlexQueries.markerTagId() ? bucketDelta(row.marker_type) : 0;
countCur = isMarker ? MarkerBreakdown.deltaFromType(1, row.marker_type) : 0;
}
}

Expand All @@ -227,23 +229,27 @@ class QueryCommands {
* optionally with breakdowns for each season attached.
* Only async to conform to command method signature.
* @param {number} metadataId The metadata id of the show/movie to grab the breakdown for.
* @param {number} includeSeasons 1 to include season data, 0 to leave it out. Ignored if metadataId is a movie. */
* @param {number} includeSeasons 1 to include season data, 0 to leave it out. Ignored if metadataId is a movie.
* @returns {Promise<TreeStats>} */
static async getMarkerBreakdownTree(metadataId, includeSeasons) {
if (!MarkerCache) {
throw new ServerError(`We shouldn't be calling get_breakdown when extended marker stats are disabled.`, 400);
}

includeSeasons = includeSeasons != 0;
let data = null;
/** @type {TreeStats|false} */
let data = false;
if (includeSeasons) {
data = MarkerCache.getTreeStats(metadataId);
} else {
data = MarkerCache.getTopLevelStats(metadataId);
data = { mainData : data, seasonData : {} };
const mainData = MarkerCache.getTopLevelStats(metadataId);
if (mainData) {
data = { mainData : data, seasonData : {} };
}
}

if (!data) {
throw new ServerError(`No marker data found for showId ${metadataId}.`, 400);
throw new ServerError(`No marker data found for id ${metadataId}.`, 400);
}

return data;
Expand Down
22 changes: 13 additions & 9 deletions Server/DatabaseWrapper.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,11 @@ import ServerError from './ServerError.js';

/** @typedef {!import('./CreateDatabase.cjs').SqliteDatabase} SqliteDatabase */

/** @typedef {[number|string|boolean|null]|{[parameter: string]: number|string|boolean|null}} QueryParameters */
/**
* @typedef {(number|string|boolean|null)[]} DbArrayParameters
* @typedef {{ _asRaw: Set<string>?, [parameter: string]: number|string|boolean|null }} DbDictParameters
* @typedef {DbArrayParameters|DbDictParameters} DbQueryParameters
*/

/**
* A wrapper around a Sqlite3 database that allows for async/await interaction,
Expand Down Expand Up @@ -33,7 +37,7 @@ class DatabaseWrapper {
/**
* Run the given query, returning no rows.
* @param {string} query
* @param {QueryParameters} [parameters=[]]
* @param {DbQueryParameters} [parameters=[]]
* @returns {Promise<void>} */
async run(query, parameters=[]) {
return this.#action(this.#db.run.bind(this.#db), query, parameters);
Expand All @@ -42,7 +46,7 @@ class DatabaseWrapper {
/**
* Retrieves a single row from the given query.
* @param {string} query
* @param {QueryParameters} [parameters=[]]
* @param {DbQueryParameters} [parameters=[]]
* @returns {Promise<any>} */
async get(query, parameters=[]) {
return this.#action(this.#db.get.bind(this.#db), query, parameters);
Expand All @@ -51,7 +55,7 @@ class DatabaseWrapper {
/**
* Retrieves all rows from the given query.
* @param {string} query
* @param {QueryParameters} parameters
* @param {DbQueryParameters} parameters
* @returns {Promise<any[]>} */
async all(query, parameters=[]) {
return this.#action(this.#db.all.bind(this.#db), query, parameters);
Expand Down Expand Up @@ -81,7 +85,7 @@ class DatabaseWrapper {
* instead of dealing with callbacks.
* @param {(sql : string, ...args : any) => Database} fn
* @param {string} query
* @param {*} parameters
* @param {DbQueryParameters|null} parameters
* @returns {Promise<any>} */
async #action(fn, query, parameters=null) {
return new Promise((resolve, reject) => {
Expand All @@ -100,7 +104,7 @@ class DatabaseWrapper {
* of the expected use-case, but it's slightly safer than writing raw queries and hoping for
* the best, which is currently the case for exec, as it doesn't accept parameterized queries
* @param {string} query Query with a '?' for each parameter.
* @param {QueryParameters} parameters The parameters to insert. Only expects numbers and strings
* @param {DbQueryParameters} parameters The parameters to insert. Only expects numbers and strings
* @throws {ServerError} If there are too many or too few parameters, or if there's a non-number/string parameter. */
static parameterize(query, parameters) {
if (parameters instanceof Array) {
Expand All @@ -117,7 +121,7 @@ class DatabaseWrapper {

/**
* Parameterize the given query of unnamed parameters.
* @param {[number|string|boolean|null]} parameters The parameters to insert. Only expects numbers and strings
* @param {DbArrayParameters} parameters The parameters to insert. Only expects numbers and strings
* @throws {ServerError} If there are too many or too few parameters, or if there's a non-number/string parameter. */
static #parameterizeArray(query, parameters) {
let startSearch = 0;
Expand Down Expand Up @@ -163,7 +167,7 @@ class DatabaseWrapper {
/**
* Parameterize the given query of named parameters.
* @param {string} query The query with named parameter placeholders.
* @param {{[parameterName: string]: number|string|boolean}} parameters The parameters to insert. Only expects number, string, bool
* @param {DbDictParameters} parameters The parameters to insert. Only expects number, string, bool
* @throws {ServerError} If there are too many or too few parameters, or if there's a non-number/string parameter. */
static #parameterizeNamed(query, parameters) {
let newQuery = query;
Expand All @@ -176,7 +180,7 @@ class DatabaseWrapper {
for (const [parameter, value] of Object.entries(parameters)) {
const regexFind = new RegExp(`\\${parameter}\\b`, 'g');
if (!regexFind.test(newQuery)) {
throw new ServerError(`Unable to parameterize query, parameter "${parameter}" not found!`);
throw new ServerError(`Unable to parameterize query, parameter "${parameter}" not found!`, 500);
}

let escapedValue = value;
Expand Down
5 changes: 4 additions & 1 deletion Server/FirstRunConfig.js
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,8 @@ import { ContextualLog } from '../Shared/ConsoleLog.js';
import { MarkerEditorConfig } from './MarkerEditorConfig.js';
import { testFfmpeg } from './ServerHelpers.js';

/** @typedef {!import('./MarkerEditorConfig.js').RawConfigFeatures} RawConfigFeatures */
/** @typedef {!import('./MarkerEditorConfig.js').RawConfig} RawConfig */

const Log = new ContextualLog('FirstRun');

Expand Down Expand Up @@ -52,6 +54,7 @@ async function FirstRunConfig(dataRoot) {
await rl.question('Press Enter to continue to configuration (Ctrl+C to cancel at any point): ');
console.log();

/** @type {RawConfig} */
const config = {};

const isDocker = process.env.IS_DOCKER;
Expand All @@ -67,7 +70,7 @@ async function FirstRunConfig(dataRoot) {
const defaultDb = join(config.dataPath, 'Plug-in Support', 'Databases', 'com.plexapp.plugins.library.db');
config.database = await askUserPath('Plex database path', rl, defaultDb);
config.host = await askUser('Editor host', 'localhost', rl);
config.port = await askUser('Editor port', '3232', rl, validPort, 'Invalid port number');
config.port = parseInt(await askUser('Editor port', '3232', rl, validPort, 'Invalid port number'));
}

config.logLevel = await askUser('Server log level (see wiki for available values)', 'Info', rl);
Expand Down

0 comments on commit 6a85e8a

Please sign in to comment.