diff --git a/.env.example b/.env.example new file mode 100644 index 00000000..21a715cb --- /dev/null +++ b/.env.example @@ -0,0 +1,39 @@ +# General environment variables +TZ=Europe/London + +# PostgreSQL +POSTGRES_HOST=postgres +POSTGRES_PORT=5432 +POSTGRES_USER=postgres +POSTGRES_PASSWORD=postgres +POSTGRES_DB=knightcrawler + +# MongoDB +MONGODB_HOST=mongodb +MONGODB_PORT=27017 +MONGODB_DB=knightcrawler +MONGO_INITDB_ROOT_USERNAME=mongo +MONGO_INITDB_ROOT_PASSWORD=mongo + +# Addon +DEBUG_MODE=false + +# Consumer +RABBIT_URI=amqp://guest:guest@rabbitmq:5672/?heartbeat=30 +QUEUE_NAME=ingested +JOB_CONCURRENCY=5 +JOBS_ENABLED=true +MAX_SINGLE_TORRENT_CONNECTIONS=10 +TORRENT_TIMEOUT=30000 +UDP_TRACKERS_ENABLED=true + +# Producer +RabbitMqConfiguration__Host=rabbitmq +RabbitMqConfiguration__QueueName=ingested +RabbitMqConfiguration__Username=guest +RabbitMqConfiguration__Password=guest +RabbitMqConfiguration__Durable=true +RabbitMqConfiguration__MaxQueueSize=0 +RabbitMqConfiguration__MaxPublishBatchSize=500 +RabbitMqConfiguration__PublishIntervalInSeconds=10 +GithubSettings__PAT= diff --git a/.gitignore b/.gitignore index e40d9682..0c759aa9 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,7 @@ .now .DS_Store .idea +.env ## Ignore Visual Studio temporary files, build results, and ## files generated by popular Visual Studio add-ons. diff --git a/README.md b/README.md index 4dc31b0f..e2aa27db 100644 --- a/README.md +++ b/README.md @@ -13,6 +13,7 @@ A self-hosted Stremio addon for streaming torrents via a debrid service. - [Overview](#overview) - [Using](#using) - [Initial setup (optional)](#initial-setup-optional) + - [Environment Setup](#environment-setup) - [Run the project](#run-the-project) - [Monitoring with Grafana and Prometheus (Optional)](#monitoring-with-grafana-and-prometheus-optional) - [Accessing RabbitMQ Management](#accessing-rabbitmq-management) @@ -60,11 +61,22 @@ We can search DebridMediaManager hash lists which are hosted on GitHub. This all GithubSettings__PAT= ``` + +### Environment Setup + +Before running the project, you need to set up the environment variables. Copy the `.env.example` file to `.env`: + +```sh +cp .env.example .env +``` + +Then set any of the values you'd like to customize. + ### Run the project Open a terminal in the directory and run the command: -``` sh +```sh docker compose up -d ``` @@ -108,6 +120,7 @@ Now, you can use these dashboards to monitor RabbitMQ and Postgres metrics. Note: If you encounter issues with missing or unavailable data in Grafana, please ensure on [Prometheus's target page](http://127.0.0.1:9090/targets) that the RabbitMQ target is up and running. + ## Importing external dumps A brief record of the steps required to import external data, in this case the rarbg dump which can be found on RD: diff --git a/docker-compose.yaml b/docker-compose.yaml index be018e55..b9dfd4e9 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -35,10 +35,9 @@ x-apps: &knightcrawler-app services: postgres: image: postgres:latest + env_file: + - .env environment: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: knightcrawler PGUSER: postgres # needed for healthcheck.
ports: - "5432:5432" @@ -51,9 +50,8 @@ services: mongodb: image: mongo:latest - environment: - MONGO_INITDB_ROOT_USERNAME: mongo - MONGO_INITDB_ROOT_PASSWORD: mongo + env_file: + - .env ports: - "27017:27017" volumes: @@ -81,7 +79,7 @@ services: context: src/producer dockerfile: Dockerfile env_file: - - env/producer.env + - .env <<: *knightcrawler-app networks: - knightcrawler-network @@ -91,7 +89,7 @@ services: context: src/node/consumer dockerfile: Dockerfile env_file: - - env/consumer.env + - .env deploy: replicas: 3 <<: *knightcrawler-app @@ -105,7 +103,7 @@ services: ports: - "7000:7000" env_file: - - env/addon.env + - .env <<: *knightcrawler-app networks: - knightcrawler-network diff --git a/env/addon.env b/env/addon.env deleted file mode 100644 index 5b5fb6b7..00000000 --- a/env/addon.env +++ /dev/null @@ -1,4 +0,0 @@ -TZ=London/Europe -DATABASE_URI=postgres://postgres:postgres@postgres/knightcrawler -MONGODB_URI=mongodb://mongo:mongo@mongodb/knightcrawler?tls=false&authSource=admin -DEBUG_MODE=false \ No newline at end of file diff --git a/env/consumer.env b/env/consumer.env deleted file mode 100644 index 555a938a..00000000 --- a/env/consumer.env +++ /dev/null @@ -1,11 +0,0 @@ -TZ=London/Europe -MONGODB_URI=mongodb://mongo:mongo@mongodb/knightcrawler?tls=false&authSource=admin -DATABASE_URI=postgres://postgres:postgres@postgres/knightcrawler -RABBIT_URI=amqp://guest:guest@rabbitmq:5672/?heartbeat=30 -QUEUE_NAME=ingested -JOB_CONCURRENCY=5 -JOBS_ENABLED=true -ENABLE_SYNC=true -MAX_SINGLE_TORRENT_CONNECTIONS=10 -TORRENT_TIMEOUT=30000 -UDP_TRACKERS_ENABLED=true \ No newline at end of file diff --git a/env/producer.env b/env/producer.env deleted file mode 100644 index d9d8ba98..00000000 --- a/env/producer.env +++ /dev/null @@ -1,10 +0,0 @@ -ScrapeConfiguration__StorageConnectionString=host=postgres;username=postgres;password=postgres;database=knightcrawler; -RabbitMqConfiguration__Host=rabbitmq -RabbitMqConfiguration__QueueName=ingested -RabbitMqConfiguration__Username=guest -RabbitMqConfiguration__Password=guest -RabbitMqConfiguration__Durable=true -RabbitMqConfiguration__MaxQueueSize=0 -RabbitMqConfiguration__MaxPublishBatchSize=500 -RabbitMqConfiguration__PublishIntervalInSeconds=10 -GithubSettings__PAT= \ No newline at end of file diff --git a/src/node/addon/src/lib/cache.js b/src/node/addon/src/lib/cache.js index 1c6c4386..aca00922 100644 --- a/src/node/addon/src/lib/cache.js +++ b/src/node/addon/src/lib/cache.js @@ -1,6 +1,7 @@ import cacheManager from 'cache-manager'; import mangodbStore from 'cache-manager-mongodb'; -import { isStaticUrl } from '../moch/static.js'; +import { cacheConfig } from './config.js'; +import { isStaticUrl } from '../moch/static.js'; const GLOBAL_KEY_PREFIX = 'knightcrawler-addon'; const STREAM_KEY_PREFIX = `${GLOBAL_KEY_PREFIX}|stream`; @@ -14,90 +15,87 @@ const AVAILABILITY_EMPTY_TTL = 30 * 60; // 30 minutes const MESSAGE_VIDEO_URL_TTL = 60; // 1 minutes // When the streams are empty we want to cache it for less time in case of timeouts or failures -const MONGO_URI = process.env.MONGODB_URI; -const NO_CACHE = process.env.NO_CACHE || false; - const memoryCache = initiateMemoryCache(); const remoteCache = initiateRemoteCache(); function initiateRemoteCache() { - if (NO_CACHE) { - return null; - } else if (MONGO_URI) { - return cacheManager.caching({ - store: mangodbStore, - uri: MONGO_URI, - options: { - collection: 'knightcrawler_addon_collection', - socketTimeoutMS: 120000, - useNewUrlParser: true, - useUnifiedTopology: false, - ttl: STREAM_EMPTY_TTL - 
}, - ttl: STREAM_EMPTY_TTL, - ignoreCacheErrors: true - }); - } else { - return cacheManager.caching({ - store: 'memory', - ttl: STREAM_EMPTY_TTL - }); - } + if (cacheConfig.NO_CACHE) { + return null; + } else if (cacheConfig.MONGO_URI) { + return cacheManager.caching({ + store: mangodbStore, + uri: cacheConfig.MONGO_URI, + options: { + collection: 'knightcrawler_addon_collection', + socketTimeoutMS: 120000, + useNewUrlParser: true, + useUnifiedTopology: false, + ttl: STREAM_EMPTY_TTL + }, + ttl: STREAM_EMPTY_TTL, + ignoreCacheErrors: true + }); + } else { + return cacheManager.caching({ + store: 'memory', + ttl: STREAM_EMPTY_TTL + }); + } } function initiateMemoryCache() { - return cacheManager.caching({ - store: 'memory', - ttl: MESSAGE_VIDEO_URL_TTL, - max: Infinity // infinite LRU cache size - }); + return cacheManager.caching({ + store: 'memory', + ttl: MESSAGE_VIDEO_URL_TTL, + max: Infinity // infinite LRU cache size + }); } function cacheWrap(cache, key, method, options) { - if (NO_CACHE || !cache) { - return method(); - } - return cache.wrap(key, method, options); + if (cacheConfig.NO_CACHE || !cache) { + return method(); + } + return cache.wrap(key, method, options); } export function cacheWrapStream(id, method) { - return cacheWrap(remoteCache, `${STREAM_KEY_PREFIX}:${id}`, method, { - ttl: (streams) => streams.length ? STREAM_TTL : STREAM_EMPTY_TTL - }); + return cacheWrap(remoteCache, `${STREAM_KEY_PREFIX}:${id}`, method, { + ttl: (streams) => streams.length ? STREAM_TTL : STREAM_EMPTY_TTL + }); } export function cacheWrapResolvedUrl(id, method) { - return cacheWrap(memoryCache, `${RESOLVED_URL_KEY_PREFIX}:${id}`, method, { - ttl: (url) => isStaticUrl(url) ? MESSAGE_VIDEO_URL_TTL : STREAM_TTL - }); + return cacheWrap(memoryCache, `${RESOLVED_URL_KEY_PREFIX}:${id}`, method, { + ttl: (url) => isStaticUrl(url) ? MESSAGE_VIDEO_URL_TTL : STREAM_TTL + }); } export function cacheAvailabilityResults(results) { - Object.keys(results) - .forEach(infoHash => { - const key = `${AVAILABILITY_KEY_PREFIX}:${infoHash}`; - const value = results[infoHash]; - const ttl = value?.length ? AVAILABILITY_TTL : AVAILABILITY_EMPTY_TTL; - memoryCache.set(key, value, { ttl }) - }); - return results; + Object.keys(results) + .forEach(infoHash => { + const key = `${AVAILABILITY_KEY_PREFIX}:${infoHash}`; + const value = results[infoHash]; + const ttl = value?.length ? 
AVAILABILITY_TTL : AVAILABILITY_EMPTY_TTL; + memoryCache.set(key, value, { ttl }) + }); + return results; } export function getCachedAvailabilityResults(infoHashes) { - const keys = infoHashes.map(infoHash => `${AVAILABILITY_KEY_PREFIX}:${infoHash}`) - return new Promise(resolve => { - memoryCache.mget(...keys, (error, result) => { - if (error) { - console.log('Failed retrieve availability cache', error) - return resolve({}); - } - const availabilityResults = {}; - infoHashes.forEach((infoHash, index) => { - if (result[index]) { - availabilityResults[infoHash] = result[index]; - } - }); - resolve(availabilityResults); - }) - }); + const keys = infoHashes.map(infoHash => `${AVAILABILITY_KEY_PREFIX}:${infoHash}`) + return new Promise(resolve => { + memoryCache.mget(...keys, (error, result) => { + if (error) { + console.log('Failed to retrieve availability cache', error) + return resolve({}); + } + const availabilityResults = {}; + infoHashes.forEach((infoHash, index) => { + if (result[index]) { + availabilityResults[infoHash] = result[index]; + } + }); + resolve(availabilityResults); + }) + }); } diff --git a/src/node/addon/src/lib/config.js b/src/node/addon/src/lib/config.js new file mode 100644 index 00000000..1e81569c --- /dev/null +++ b/src/node/addon/src/lib/config.js @@ -0,0 +1,38 @@ +export const cacheConfig = { + MONGODB_HOST: process.env.MONGODB_HOST || 'mongodb', + MONGODB_PORT: process.env.MONGODB_PORT || '27017', + MONGODB_DB: process.env.MONGODB_DB || 'knightcrawler', + MONGO_INITDB_ROOT_USERNAME: process.env.MONGO_INITDB_ROOT_USERNAME || 'mongo', + MONGO_INITDB_ROOT_PASSWORD: process.env.MONGO_INITDB_ROOT_PASSWORD || 'mongo', + COLLECTION_NAME: process.env.MONGODB_COLLECTION || 'knightcrawler_addon_collection', + NO_CACHE: parseBool(process.env.NO_CACHE, false), +} + +// Combine the environment variables into a connection string +// The combined string will look something like: +// 'mongodb://mongo:mongo@localhost:27017/knightcrawler?authSource=admin' +cacheConfig.MONGO_URI = 'mongodb://' + cacheConfig.MONGO_INITDB_ROOT_USERNAME + ':' + cacheConfig.MONGO_INITDB_ROOT_PASSWORD + '@' + cacheConfig.MONGODB_HOST + ':' + cacheConfig.MONGODB_PORT + '/' + cacheConfig.MONGODB_DB + '?authSource=admin'; + +export const databaseConfig = { + POSTGRES_HOST: process.env.POSTGRES_HOST || 'postgres', + POSTGRES_PORT: process.env.POSTGRES_PORT || '5432', + POSTGRES_DATABASE: process.env.POSTGRES_DB || 'knightcrawler', + POSTGRES_USERNAME: process.env.POSTGRES_USER || 'postgres', + POSTGRES_PASSWORD: process.env.POSTGRES_PASSWORD || 'postgres', +} + +// Combine the environment variables into a connection string
// The combined string will look something like: +// 'postgres://postgres:postgres@localhost:5432/knightcrawler' +databaseConfig.POSTGRES_URI = 'postgres://' + databaseConfig.POSTGRES_USERNAME + ':' + databaseConfig.POSTGRES_PASSWORD + '@' + databaseConfig.POSTGRES_HOST + ':' + databaseConfig.POSTGRES_PORT + '/' + databaseConfig.POSTGRES_DATABASE; + + +function parseBool(boolString, defaultValue) { + const isString = typeof boolString === 'string' || boolString instanceof String; + + if (!isString) { + return defaultValue; + } + + return boolString.toLowerCase() === 'true' ?
true : defaultValue; +} \ No newline at end of file diff --git a/src/node/addon/src/lib/repository.js b/src/node/addon/src/lib/repository.js index 1c268641..3e3298ba 100644 --- a/src/node/addon/src/lib/repository.js +++ b/src/node/addon/src/lib/repository.js @@ -1,64 +1,68 @@ import { Sequelize } from 'sequelize'; +import { databaseConfig } from './config.js'; const { Op } = Sequelize; -const DATABASE_URI = process.env.DATABASE_URI || 'postgres://postgres:postgres@localhost:5432/postgres'; - -const database = new Sequelize(DATABASE_URI, { logging: false }); +const database = new Sequelize( + databaseConfig.POSTGRES_URI, + { + logging: false + } +); const Torrent = database.define('torrent', { - infoHash: { type: Sequelize.STRING(64), primaryKey: true }, - provider: { type: Sequelize.STRING(32), allowNull: false }, - torrentId: { type: Sequelize.STRING(128) }, - title: { type: Sequelize.STRING(256), allowNull: false }, - size: { type: Sequelize.BIGINT }, - type: { type: Sequelize.STRING(16), allowNull: false }, - uploadDate: { type: Sequelize.DATE, allowNull: false }, - seeders: { type: Sequelize.SMALLINT }, - trackers: { type: Sequelize.STRING(4096) }, - languages: { type: Sequelize.STRING(4096) }, - resolution: { type: Sequelize.STRING(16) } + infoHash: { type: Sequelize.STRING(64), primaryKey: true }, + provider: { type: Sequelize.STRING(32), allowNull: false }, + torrentId: { type: Sequelize.STRING(128) }, + title: { type: Sequelize.STRING(256), allowNull: false }, + size: { type: Sequelize.BIGINT }, + type: { type: Sequelize.STRING(16), allowNull: false }, + uploadDate: { type: Sequelize.DATE, allowNull: false }, + seeders: { type: Sequelize.SMALLINT }, + trackers: { type: Sequelize.STRING(4096) }, + languages: { type: Sequelize.STRING(4096) }, + resolution: { type: Sequelize.STRING(16) } } ); const File = database.define('file', { - id: { type: Sequelize.BIGINT, autoIncrement: true, primaryKey: true }, - infoHash: { - type: Sequelize.STRING(64), - allowNull: false, - references: { model: Torrent, key: 'infoHash' }, - onDelete: 'CASCADE' - }, - fileIndex: { type: Sequelize.INTEGER }, - title: { type: Sequelize.STRING(256), allowNull: false }, - size: { type: Sequelize.BIGINT }, - imdbId: { type: Sequelize.STRING(32) }, - imdbSeason: { type: Sequelize.INTEGER }, - imdbEpisode: { type: Sequelize.INTEGER }, - kitsuId: { type: Sequelize.INTEGER }, - kitsuEpisode: { type: Sequelize.INTEGER } + id: { type: Sequelize.BIGINT, autoIncrement: true, primaryKey: true }, + infoHash: { + type: Sequelize.STRING(64), + allowNull: false, + references: { model: Torrent, key: 'infoHash' }, + onDelete: 'CASCADE' + }, + fileIndex: { type: Sequelize.INTEGER }, + title: { type: Sequelize.STRING(256), allowNull: false }, + size: { type: Sequelize.BIGINT }, + imdbId: { type: Sequelize.STRING(32) }, + imdbSeason: { type: Sequelize.INTEGER }, + imdbEpisode: { type: Sequelize.INTEGER }, + kitsuId: { type: Sequelize.INTEGER }, + kitsuEpisode: { type: Sequelize.INTEGER } }, ); const Subtitle = database.define('subtitle', { - infoHash: { - type: Sequelize.STRING(64), - allowNull: false, - references: { model: Torrent, key: 'infoHash' }, - onDelete: 'CASCADE' - }, - fileIndex: { type: Sequelize.INTEGER, allowNull: false }, - fileId: { - type: Sequelize.BIGINT, - allowNull: true, - references: { model: File, key: 'id' }, - onDelete: 'SET NULL' - }, - title: { type: Sequelize.STRING(512), allowNull: false }, - size: { type: Sequelize.BIGINT, allowNull: false }, + infoHash: { + type: Sequelize.STRING(64), + allowNull: 
false, + references: { model: Torrent, key: 'infoHash' }, + onDelete: 'CASCADE' + }, + fileIndex: { type: Sequelize.INTEGER, allowNull: false }, + fileId: { + type: Sequelize.BIGINT, + allowNull: true, + references: { model: File, key: 'id' }, + onDelete: 'SET NULL' + }, + title: { type: Sequelize.STRING(512), allowNull: false }, + size: { type: Sequelize.BIGINT, allowNull: false }, }, { timestamps: false } ); @@ -69,66 +73,66 @@ File.hasMany(Subtitle, { foreignKey: 'fileId', constraints: false }); Subtitle.belongsTo(File, { foreignKey: 'fileId', constraints: false }); export function getTorrent(infoHash) { - return Torrent.findOne({ where: { infoHash: infoHash } }); + return Torrent.findOne({ where: { infoHash: infoHash } }); } export function getFiles(infoHashes) { - return File.findAll({ where: { infoHash: { [Op.in]: infoHashes} } }); + return File.findAll({ where: { infoHash: { [Op.in]: infoHashes } } }); } export function getImdbIdMovieEntries(imdbId) { - return File.findAll({ - where: { - imdbId: { [Op.eq]: imdbId } - }, - include: [Torrent], - limit: 500, - order: [ - [Torrent, 'size', 'DESC'] - ] - }); + return File.findAll({ + where: { + imdbId: { [Op.eq]: imdbId } + }, + include: [Torrent], + limit: 500, + order: [ + [Torrent, 'size', 'DESC'] + ] + }); } export function getImdbIdSeriesEntries(imdbId, season, episode) { - return File.findAll({ - where: { - imdbId: { [Op.eq]: imdbId }, - imdbSeason: { [Op.eq]: season }, - imdbEpisode: { [Op.eq]: episode } - }, - include: [Torrent], - limit: 500, - order: [ - [Torrent, 'size', 'DESC'] - ] - }); + return File.findAll({ + where: { + imdbId: { [Op.eq]: imdbId }, + imdbSeason: { [Op.eq]: season }, + imdbEpisode: { [Op.eq]: episode } + }, + include: [Torrent], + limit: 500, + order: [ + [Torrent, 'size', 'DESC'] + ] + }); } export function getKitsuIdMovieEntries(kitsuId) { - return File.findAll({ - where: { - kitsuId: { [Op.eq]: kitsuId } - }, - include: [Torrent], - limit: 500, - order: [ - [Torrent, 'size', 'DESC'] - ] - }); + return File.findAll({ + where: { + kitsuId: { [Op.eq]: kitsuId } + }, + include: [Torrent], + limit: 500, + order: [ + [Torrent, 'size', 'DESC'] + ] + }); } export function getKitsuIdSeriesEntries(kitsuId, episode) { - return File.findAll({ - where: { - kitsuId: { [Op.eq]: kitsuId }, - kitsuEpisode: { [Op.eq]: episode } - }, - include: [Torrent], - limit: 500, - order: [ - [Torrent, 'size', 'DESC'] - ] - }); + return File.findAll({ + where: { + kitsuId: { [Op.eq]: kitsuId }, + kitsuEpisode: { [Op.eq]: episode } + }, + include: [Torrent], + limit: 500, + order: [ + [Torrent, 'size', 'DESC'] + ] + }); } diff --git a/src/node/consumer/src/lib/config.js b/src/node/consumer/src/lib/config.js index ea1b55a9..92499566 100644 --- a/src/node/consumer/src/lib/config.js +++ b/src/node/consumer/src/lib/config.js @@ -4,16 +4,34 @@ } export const cacheConfig = { - MONGO_URI: process.env.MONGODB_URI || 'mongodb://mongo:mongo@localhost:27017/knightcrawler?authSource=admin', + MONGODB_HOST: process.env.MONGODB_HOST || 'mongodb', + MONGODB_PORT: process.env.MONGODB_PORT || '27017', + MONGODB_DB: process.env.MONGODB_DB || 'knightcrawler', + MONGO_INITDB_ROOT_USERNAME: process.env.MONGO_INITDB_ROOT_USERNAME || 'mongo', + MONGO_INITDB_ROOT_PASSWORD: process.env.MONGO_INITDB_ROOT_PASSWORD || 'mongo', NO_CACHE: parseBool(process.env.NO_CACHE, false), COLLECTION_NAME: process.env.MONGODB_COLLECTION || 'knightcrawler_consumer_collection' } +// Combine the environment variables into a connection string +// The combined string will look 
something like: +// 'mongodb://mongo:mongo@localhost:27017/knightcrawler?authSource=admin' +cacheConfig.MONGO_URI = 'mongodb://' + cacheConfig.MONGO_INITDB_ROOT_USERNAME + ':' + cacheConfig.MONGO_INITDB_ROOT_PASSWORD + '@' + cacheConfig.MONGODB_HOST + ':' + cacheConfig.MONGODB_PORT + '/' + cacheConfig.MONGODB_DB + '?authSource=admin'; + export const databaseConfig = { - DATABASE_URI: process.env.DATABASE_URI || 'postgres://postgres:postgres@localhost:5432/knightcrawler', - ENABLE_SYNC: parseBool(process.env.ENABLE_SYNC, true) + POSTGRES_HOST: process.env.POSTGRES_HOST || 'postgres', + POSTGRES_PORT: process.env.POSTGRES_PORT || '5432', + POSTGRES_DATABASE: process.env.POSTGRES_DB || 'knightcrawler', + POSTGRES_USERNAME: process.env.POSTGRES_USER || 'postgres', + POSTGRES_PASSWORD: process.env.POSTGRES_PASSWORD || 'postgres', + ENABLE_SYNC: true } +// Combine the environment variables into a connection string +// The combined string will look something like: +// 'postgres://postgres:postgres@localhost:5432/knightcrawler' +databaseConfig.POSTGRES_URI = 'postgres://' + databaseConfig.POSTGRES_USERNAME + ':' + databaseConfig.POSTGRES_PASSWORD + '@' + databaseConfig.POSTGRES_HOST + ':' + databaseConfig.POSTGRES_PORT + '/' + databaseConfig.POSTGRES_DATABASE; + export const jobConfig = { JOB_CONCURRENCY: parseInt(process.env.JOB_CONCURRENCY || 1), JOBS_ENABLED: parseBool(process.env.JOBS_ENABLED || true) @@ -36,10 +54,10 @@ export const torrentConfig = { function parseBool(boolString, defaultValue) { const isString = typeof boolString === 'string' || boolString instanceof String; - + if (!isString) { return defaultValue; } - - return boolString.toLowerCase() === 'true' ? true : defaultValue; + + return boolString.toLowerCase() === 'true' ? true : defaultValue; } \ No newline at end of file diff --git a/src/node/consumer/src/lib/repository.js b/src/node/consumer/src/lib/repository.js index eb26ec0f..96a75a46 100644 --- a/src/node/consumer/src/lib/repository.js +++ b/src/node/consumer/src/lib/repository.js @@ -5,16 +5,16 @@ import {logger} from "./logger.js"; import * as Promises from './promises.js'; const database = new Sequelize( - databaseConfig.DATABASE_URI, + databaseConfig.POSTGRES_URI, { - logging: false + logging: false } ); const Provider = database.define('provider', { - name: { type: DataTypes.STRING(32), primaryKey: true }, - lastScraped: { type: DataTypes.DATE }, - lastScrapedId: { type: DataTypes.STRING(128) } + name: { type: DataTypes.STRING(32), primaryKey: true }, + lastScraped: { type: DataTypes.DATE }, + lastScrapedId: { type: DataTypes.STRING(128) } }); const IngestedTorrent = database.define('ingested_torrent', { @@ -30,7 +30,8 @@ const IngestedTorrent = database.define('ingested_torrent', { processed: { type: DataTypes.BOOLEAN, defaultValue: false - }}, + } +}, { indexes: [ { @@ -42,9 +43,9 @@ const IngestedTorrent = database.define('ingested_torrent', { /* eslint-disable no-unused-vars */ const IngestedPage = database.define('ingested_page', { - id: { type: DataTypes.BIGINT, autoIncrement: true, primaryKey: true }, - url: { type: DataTypes.STRING, allowNull: false }, - }, + id: { type: DataTypes.BIGINT, autoIncrement: true, primaryKey: true }, + url: { type: DataTypes.STRING, allowNull: false }, +}, { indexes: [ { @@ -57,122 +58,122 @@ const IngestedTorrent = database.define('ingested_torrent', { const Torrent = database.define('torrent', { - infoHash: { type: DataTypes.STRING(64), primaryKey: true }, - provider: { type: DataTypes.STRING(32), allowNull: false }, -
torrentId: { type: DataTypes.STRING(512) }, - title: { type: DataTypes.STRING(512), allowNull: false }, - size: { type: DataTypes.BIGINT }, - type: { type: DataTypes.STRING(16), allowNull: false }, - uploadDate: { type: DataTypes.DATE, allowNull: false }, - seeders: { type: DataTypes.SMALLINT }, - trackers: { type: DataTypes.STRING(8000) }, - languages: { type: DataTypes.STRING(4096) }, - resolution: { type: DataTypes.STRING(16) }, - reviewed: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false }, - opened: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false } + infoHash: { type: DataTypes.STRING(64), primaryKey: true }, + provider: { type: DataTypes.STRING(32), allowNull: false }, + torrentId: { type: DataTypes.STRING(512) }, + title: { type: DataTypes.STRING(512), allowNull: false }, + size: { type: DataTypes.BIGINT }, + type: { type: DataTypes.STRING(16), allowNull: false }, + uploadDate: { type: DataTypes.DATE, allowNull: false }, + seeders: { type: DataTypes.SMALLINT }, + trackers: { type: DataTypes.STRING(8000) }, + languages: { type: DataTypes.STRING(4096) }, + resolution: { type: DataTypes.STRING(16) }, + reviewed: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false }, + opened: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false } } ); const File = database.define('file', { - id: { type: DataTypes.BIGINT, autoIncrement: true, primaryKey: true }, - infoHash: { - type: DataTypes.STRING(64), - allowNull: false, - references: { model: Torrent, key: 'infoHash' }, - onDelete: 'CASCADE' - }, - fileIndex: { type: DataTypes.INTEGER }, - title: { type: DataTypes.STRING(512), allowNull: false }, - size: { type: DataTypes.BIGINT }, - imdbId: { type: DataTypes.STRING(32) }, - imdbSeason: { type: DataTypes.INTEGER }, - imdbEpisode: { type: DataTypes.INTEGER }, - kitsuId: { type: DataTypes.INTEGER }, - kitsuEpisode: { type: DataTypes.INTEGER } + id: { type: DataTypes.BIGINT, autoIncrement: true, primaryKey: true }, + infoHash: { + type: DataTypes.STRING(64), + allowNull: false, + references: { model: Torrent, key: 'infoHash' }, + onDelete: 'CASCADE' + }, + fileIndex: { type: DataTypes.INTEGER }, + title: { type: DataTypes.STRING(512), allowNull: false }, + size: { type: DataTypes.BIGINT }, + imdbId: { type: DataTypes.STRING(32) }, + imdbSeason: { type: DataTypes.INTEGER }, + imdbEpisode: { type: DataTypes.INTEGER }, + kitsuId: { type: DataTypes.INTEGER }, + kitsuEpisode: { type: DataTypes.INTEGER } }, { - indexes: [ - { - unique: true, - name: 'files_unique_file_constraint', - fields: [ - col('infoHash'), - fn('COALESCE', (col('fileIndex')), -1), - fn('COALESCE', (col('imdbId')), 'null'), - fn('COALESCE', (col('imdbSeason')), -1), - fn('COALESCE', (col('imdbEpisode')), -1), - fn('COALESCE', (col('kitsuId')), -1), - fn('COALESCE', (col('kitsuEpisode')), -1) - ] - }, - { unique: false, fields: ['imdbId', 'imdbSeason', 'imdbEpisode'] }, - { unique: false, fields: ['kitsuId', 'kitsuEpisode'] } - ] + indexes: [ + { + unique: true, + name: 'files_unique_file_constraint', + fields: [ + col('infoHash'), + fn('COALESCE', (col('fileIndex')), -1), + fn('COALESCE', (col('imdbId')), 'null'), + fn('COALESCE', (col('imdbSeason')), -1), + fn('COALESCE', (col('imdbEpisode')), -1), + fn('COALESCE', (col('kitsuId')), -1), + fn('COALESCE', (col('kitsuEpisode')), -1) + ] + }, + { unique: false, fields: ['imdbId', 'imdbSeason', 'imdbEpisode'] }, + { unique: false, fields: ['kitsuId', 'kitsuEpisode'] } + ] } ); const Subtitle = database.define('subtitle', { - 
infoHash: { - type: DataTypes.STRING(64), - allowNull: false, - references: { model: Torrent, key: 'infoHash' }, - onDelete: 'CASCADE' - }, - fileIndex: { - type: DataTypes.INTEGER, - allowNull: false - }, - fileId: { - type: DataTypes.BIGINT, - allowNull: true, - references: { model: File, key: 'id' }, - onDelete: 'SET NULL' - }, - title: { type: DataTypes.STRING(512), allowNull: false }, + infoHash: { + type: DataTypes.STRING(64), + allowNull: false, + references: { model: Torrent, key: 'infoHash' }, + onDelete: 'CASCADE' + }, + fileIndex: { + type: DataTypes.INTEGER, + allowNull: false + }, + fileId: { + type: DataTypes.BIGINT, + allowNull: true, + references: { model: File, key: 'id' }, + onDelete: 'SET NULL' + }, + title: { type: DataTypes.STRING(512), allowNull: false }, }, { - timestamps: false, - indexes: [ - { - unique: true, - name: 'subtitles_unique_subtitle_constraint', - fields: [ - col('infoHash'), - col('fileIndex'), - fn('COALESCE', (col('fileId')), -1) - ] - }, - { unique: false, fields: ['fileId'] } - ] + timestamps: false, + indexes: [ + { + unique: true, + name: 'subtitles_unique_subtitle_constraint', + fields: [ + col('infoHash'), + col('fileIndex'), + fn('COALESCE', (col('fileId')), -1) + ] + }, + { unique: false, fields: ['fileId'] } + ] } ); const Content = database.define('content', { - infoHash: { - type: DataTypes.STRING(64), - primaryKey: true, - allowNull: false, - references: { model: Torrent, key: 'infoHash' }, - onDelete: 'CASCADE' - }, - fileIndex: { - type: DataTypes.INTEGER, - primaryKey: true, - allowNull: false - }, - path: { type: DataTypes.STRING(512), allowNull: false }, - size: { type: DataTypes.BIGINT }, + infoHash: { + type: DataTypes.STRING(64), + primaryKey: true, + allowNull: false, + references: { model: Torrent, key: 'infoHash' }, + onDelete: 'CASCADE' + }, + fileIndex: { + type: DataTypes.INTEGER, + primaryKey: true, + allowNull: false + }, + path: { type: DataTypes.STRING(512), allowNull: false }, + size: { type: DataTypes.BIGINT }, }, { - timestamps: false, + timestamps: false, } ); const SkipTorrent = database.define('skip_torrent', { - infoHash: { type: DataTypes.STRING(64), primaryKey: true }, + infoHash: { type: DataTypes.STRING(64), primaryKey: true }, }); Torrent.hasMany(File, { foreignKey: 'infoHash', constraints: false }); @@ -183,39 +184,39 @@ File.hasMany(Subtitle, { foreignKey: 'fileId', constraints: false }); Subtitle.belongsTo(File, { foreignKey: 'fileId', constraints: false }); export function connect() { - if (databaseConfig.ENABLE_SYNC) { - return database.sync({ alter: true }) - .catch(error => { - logger.error('Failed syncing database: ', error); - throw error; - }); - } - return Promise.resolve(); + if (databaseConfig.ENABLE_SYNC) { + return database.sync({ alter: true }) + .catch(error => { + logger.error('Failed syncing database: ', error); + throw error; + }); + } + return Promise.resolve(); } export function getProvider(provider) { - return Provider.findOrCreate({ where: { name: { [Op.eq]: provider.name } }, defaults: provider }) - .then((result) => result[0]) - .catch(() => provider); + return Provider.findOrCreate({ where: { name: { [Op.eq]: provider.name } }, defaults: provider }) + .then((result) => result[0]) + .catch(() => provider); } export function getTorrent(torrent) { - const where = torrent.infoHash - ?
{ infoHash: torrent.infoHash } + : { provider: torrent.provider, torrentId: torrent.torrentId } + return Torrent.findOne({ where: where }); } export function getTorrentsBasedOnTitle(titleQuery, type) { - return getTorrentsBasedOnQuery({ title: { [Op.regexp]: `${titleQuery}` }, type: type }); + return getTorrentsBasedOnQuery({ title: { [Op.regexp]: `${titleQuery}` }, type: type }); } export function getTorrentsBasedOnQuery(where) { - return Torrent.findAll({ where: where }); + return Torrent.findAll({ where: where }); } export function getFilesBasedOnQuery(where) { - return File.findAll({ where: where }); + return File.findAll({ where: where }); } export function getUnprocessedIngestedTorrents() { @@ -226,7 +227,7 @@ export function getUnprocessedIngestedTorrents() { [Op.or]: ['tv', 'movies'] } }, - + }); } @@ -239,142 +240,142 @@ export function setIngestedTorrentsProcessed(ingestedTorrents) { } export function getTorrentsWithoutSize() { - return Torrent.findAll({ - where: literal( - 'exists (select 1 from files where files."infoHash" = torrent."infoHash" and files.size = 300000000)'), - order: [ - ['seeders', 'DESC'] - ] - }); + return Torrent.findAll({ + where: literal( + 'exists (select 1 from files where files."infoHash" = torrent."infoHash" and files.size = 300000000)'), + order: [ + ['seeders', 'DESC'] + ] + }); } export function getUpdateSeedersTorrents(limit = 50) { - const until = moment().subtract(7, 'days').format('YYYY-MM-DD'); - return Torrent.findAll({ - where: literal(`torrent."updatedAt" < '${until}'`), - limit: limit, - order: [ - ['seeders', 'DESC'], - ['updatedAt', 'ASC'] - ] - }); + const until = moment().subtract(7, 'days').format('YYYY-MM-DD'); + return Torrent.findAll({ + where: literal(`torrent."updatedAt" < '${until}'`), + limit: limit, + order: [ + ['seeders', 'DESC'], + ['updatedAt', 'ASC'] + ] + }); } export function getUpdateSeedersNewTorrents(limit = 50) { - const lastUpdate = moment().subtract(12, 'hours').format('YYYY-MM-DD'); - const createdAfter = moment().subtract(4, 'days').format('YYYY-MM-DD'); - return Torrent.findAll({ - where: literal(`torrent."updatedAt" < '${lastUpdate}' AND torrent."createdAt" > '${createdAfter}'`), - limit: limit, - order: [ - ['seeders', 'ASC'], - ['updatedAt', 'ASC'] - ] - }); + const lastUpdate = moment().subtract(12, 'hours').format('YYYY-MM-DD'); + const createdAfter = moment().subtract(4, 'days').format('YYYY-MM-DD'); + return Torrent.findAll({ + where: literal(`torrent."updatedAt" < '${lastUpdate}' AND torrent."createdAt" > '${createdAfter}'`), + limit: limit, + order: [ + ['seeders', 'ASC'], + ['updatedAt', 'ASC'] + ] + }); } export function getNoContentsTorrents() { - return Torrent.findAll({ - where: { opened: false, seeders: { [Op.gte]: 1 } }, - limit: 500, - order: [[fn('RANDOM')]] - }); + return Torrent.findAll({ + where: { opened: false, seeders: { [Op.gte]: 1 } }, + limit: 500, + order: [[fn('RANDOM')]] + }); } export function createTorrent(torrent) { - return Torrent.upsert(torrent) - .then(() => createContents(torrent.infoHash, torrent.contents)) - .then(() => createSubtitles(torrent.infoHash, torrent.subtitles)); + return Torrent.upsert(torrent) + .then(() => createContents(torrent.infoHash, torrent.contents)) + .then(() => createSubtitles(torrent.infoHash, torrent.subtitles)); } export function setTorrentSeeders(torrent, seeders) { - const where = torrent.infoHash - ? 
{ infoHash: torrent.infoHash } - : { provider: torrent.provider, torrentId: torrent.torrentId } - return Torrent.update( - { seeders: seeders }, - { where: where } - ); + const where = torrent.infoHash + ? { infoHash: torrent.infoHash } + : { provider: torrent.provider, torrentId: torrent.torrentId } + return Torrent.update( + { seeders: seeders }, + { where: where } + ); } export function deleteTorrent(torrent) { - return Torrent.destroy({ where: { infoHash: torrent.infoHash } }) + return Torrent.destroy({ where: { infoHash: torrent.infoHash } }) } export function createFile(file) { - if (file.id) { - return (file.dataValues ? file.save() : File.upsert(file)) - .then(() => upsertSubtitles(file, file.subtitles)); - } - if (file.subtitles && file.subtitles.length) { - file.subtitles = file.subtitles.map(subtitle => ({ infoHash: file.infoHash, title: subtitle.path, ...subtitle })); - } - return File.create(file, { include: [Subtitle], ignoreDuplicates: true }); + if (file.id) { + return (file.dataValues ? file.save() : File.upsert(file)) + .then(() => upsertSubtitles(file, file.subtitles)); + } + if (file.subtitles && file.subtitles.length) { + file.subtitles = file.subtitles.map(subtitle => ({ infoHash: file.infoHash, title: subtitle.path, ...subtitle })); + } + return File.create(file, { include: [Subtitle], ignoreDuplicates: true }); } export function getFiles(torrent) { - return File.findAll({ where: { infoHash: torrent.infoHash } }); + return File.findAll({ where: { infoHash: torrent.infoHash } }); } export function getFilesBasedOnTitle(titleQuery) { - return File.findAll({ where: { title: { [Op.regexp]: `${titleQuery}` } } }); + return File.findAll({ where: { title: { [Op.regexp]: `${titleQuery}` } } }); } export function deleteFile(file) { - return File.destroy({ where: { id: file.id } }) + return File.destroy({ where: { id: file.id } }) } export function createSubtitles(infoHash, subtitles) { - if (subtitles && subtitles.length) { - return Subtitle.bulkCreate(subtitles.map(subtitle => ({ infoHash, title: subtitle.path, ...subtitle }))); - } - return Promise.resolve(); + if (subtitles && subtitles.length) { + return Subtitle.bulkCreate(subtitles.map(subtitle => ({ infoHash, title: subtitle.path, ...subtitle }))); + } + return Promise.resolve(); } export function upsertSubtitles(file, subtitles) { - if (file.id && subtitles && subtitles.length) { - return Promises.sequence(subtitles - .map(subtitle => { - subtitle.fileId = file.id; - subtitle.infoHash = subtitle.infoHash || file.infoHash; - subtitle.title = subtitle.title || subtitle.path; - return subtitle; - }) - .map(subtitle => () => subtitle.dataValues ? subtitle.save() : Subtitle.create(subtitle))); - } - return Promise.resolve(); + if (file.id && subtitles && subtitles.length) { + return Promises.sequence(subtitles + .map(subtitle => { + subtitle.fileId = file.id; + subtitle.infoHash = subtitle.infoHash || file.infoHash; + subtitle.title = subtitle.title || subtitle.path; + return subtitle; + }) + .map(subtitle => () => subtitle.dataValues ? 
subtitle.save() : Subtitle.create(subtitle))); + } + return Promise.resolve(); } export function getSubtitles(torrent) { - return Subtitle.findAll({ where: { infoHash: torrent.infoHash } }); + return Subtitle.findAll({ where: { infoHash: torrent.infoHash } }); } export function getUnassignedSubtitles() { - return Subtitle.findAll({ where: { fileId: null } }); + return Subtitle.findAll({ where: { fileId: null } }); } export function createContents(infoHash, contents) { - if (contents && contents.length) { - return Content.bulkCreate(contents.map(content => ({ infoHash, ...content })), { ignoreDuplicates: true }) - .then(() => Torrent.update({ opened: true }, { where: { infoHash: infoHash }, silent: true })); - } - return Promise.resolve(); + if (contents && contents.length) { + return Content.bulkCreate(contents.map(content => ({ infoHash, ...content })), { ignoreDuplicates: true }) + .then(() => Torrent.update({ opened: true }, { where: { infoHash: infoHash }, silent: true })); + } + return Promise.resolve(); } export function getContents(torrent) { - return Content.findAll({ where: { infoHash: torrent.infoHash } }); + return Content.findAll({ where: { infoHash: torrent.infoHash } }); } export function getSkipTorrent(torrent) { - return SkipTorrent.findByPk(torrent.infoHash) - .then((result) => { - if (!result) { - throw new Error(`torrent not found: ${torrent.infoHash}`); - } - return result.dataValues; - }) + return SkipTorrent.findByPk(torrent.infoHash) + .then((result) => { + if (!result) { + throw new Error(`torrent not found: ${torrent.infoHash}`); + } + return result.dataValues; + }) } export function createSkipTorrent(torrent) { - return SkipTorrent.upsert({ infoHash: torrent.infoHash }); + return SkipTorrent.upsert({ infoHash: torrent.infoHash }); } diff --git a/src/producer/Configuration/scrapers.json b/src/producer/Configuration/scrapers.json index aa305917..5cd9b5da 100644 --- a/src/producer/Configuration/scrapers.json +++ b/src/producer/Configuration/scrapers.json @@ -1,6 +1,5 @@ { "ScrapeConfiguration": { - "StorageConnectionString": "", "Scrapers": [ { "Name": "SyncEzTvJob", diff --git a/src/producer/Crawlers/Sites/DebridMediaManagerCrawler.cs b/src/producer/Crawlers/Sites/DebridMediaManagerCrawler.cs index dabb7389..8daf3ae3 100644 --- a/src/producer/Crawlers/Sites/DebridMediaManagerCrawler.cs +++ b/src/producer/Crawlers/Sites/DebridMediaManagerCrawler.cs @@ -1,3 +1,5 @@ +using Producer.Models.Configuration; + namespace Producer.Crawlers.Sites; public partial class DebridMediaManagerCrawler( diff --git a/src/producer/Extensions/ConfigurationExtensions.cs b/src/producer/Extensions/ConfigurationExtensions.cs index b321cab0..08fb5317 100644 --- a/src/producer/Extensions/ConfigurationExtensions.cs +++ b/src/producer/Extensions/ConfigurationExtensions.cs @@ -1,3 +1,5 @@ +using Producer.Models.Configuration; + namespace Producer.Extensions; public static class ConfigurationExtensions diff --git a/src/producer/Extensions/ServiceCollectionExtensions.cs b/src/producer/Extensions/ServiceCollectionExtensions.cs index 7ce067af..3461e77e 100644 --- a/src/producer/Extensions/ServiceCollectionExtensions.cs +++ b/src/producer/Extensions/ServiceCollectionExtensions.cs @@ -1,3 +1,5 @@ +using Producer.Models.Configuration; + namespace Producer.Extensions; public static class ServiceCollectionExtensions @@ -20,6 +22,7 @@ internal static IServiceCollection AddCrawlers(this IServiceCollection services) internal static IServiceCollection AddDataStorage(this IServiceCollection services) { + 
services.LoadConfigurationFromEnv<PostgresConfiguration>(); services.AddTransient(); services.AddTransient(); return services; @@ -36,9 +39,9 @@ internal static IServiceCollection RegisterMassTransit(this IServiceCollection s services.AddMassTransit(busConfigurator => { busConfigurator.SetKebabCaseEndpointNameFormatter(); - busConfigurator.UsingRabbitMq((context, busFactoryConfigurator) => + busConfigurator.UsingRabbitMq((_, busFactoryConfigurator) => { - busFactoryConfigurator.Host(rabbitConfig!.Host, hostConfigurator => + busFactoryConfigurator.Host(rabbitConfig.Host, hostConfigurator => { hostConfigurator.Username(rabbitConfig.Username); hostConfigurator.Password(rabbitConfig.Password); @@ -51,9 +54,9 @@ internal static IServiceCollection AddQuartz(this IServiceCollection services, I { - var scrapeConfiguration = LoadScrapeConfiguration(services, configuration); - var githubConfiguration = LoadGithubConfiguration(services, configuration); - var rabbitConfig = LoadRabbitMQConfiguration(services, configuration); + var scrapeConfiguration = services.LoadConfigurationFromConfig<ScrapeConfiguration>(configuration, ScrapeConfiguration.SectionName); + var githubConfiguration = services.LoadConfigurationFromConfig<GithubConfiguration>(configuration, GithubConfiguration.SectionName); + var rabbitConfig = services.LoadConfigurationFromConfig<RabbitMqConfiguration>(configuration, RabbitMqConfiguration.SectionName); services .AddTransient() @@ -92,46 +95,29 @@ internal static IServiceCollection AddQuartz(this IServiceCollection services, I return services; } - - private static GithubConfiguration LoadGithubConfiguration(IServiceCollection services, IConfiguration configuration) + + private static TConfiguration LoadConfigurationFromConfig<TConfiguration>(this IServiceCollection services, IConfiguration configuration, string sectionName) + where TConfiguration : class { - var githubConfiguration = configuration.GetSection(GithubConfiguration.SectionName).Get<GithubConfiguration>(); - - ArgumentNullException.ThrowIfNull(githubConfiguration, nameof(githubConfiguration)); + var instance = configuration.GetSection(sectionName).Get<TConfiguration>(); - services.TryAddSingleton(githubConfiguration); - - return githubConfiguration; - } - - private static RabbitMqConfiguration LoadRabbitMQConfiguration(IServiceCollection services, IConfiguration configuration) - { - var rabbitConfiguration = configuration.GetSection(RabbitMqConfiguration.SectionName).Get<RabbitMqConfiguration>(); + ArgumentNullException.ThrowIfNull(instance, nameof(instance)); - ArgumentNullException.ThrowIfNull(rabbitConfiguration, nameof(rabbitConfiguration)); + services.TryAddSingleton(instance); - if (rabbitConfiguration.MaxQueueSize > 0) - { - if (rabbitConfiguration.MaxPublishBatchSize > rabbitConfiguration.MaxQueueSize) - { - throw new InvalidOperationException("MaxPublishBatchSize cannot be greater than MaxQueueSize in RabbitMqConfiguration"); - } - } - - services.TryAddSingleton(rabbitConfiguration); - - return rabbitConfiguration; + return instance; } - - private static ScrapeConfiguration LoadScrapeConfiguration(IServiceCollection services, IConfiguration configuration) + + private static TConfiguration LoadConfigurationFromEnv<TConfiguration>(this IServiceCollection services) + where TConfiguration : class { - var scrapeConfiguration = configuration.GetSection(ScrapeConfiguration.SectionName).Get<ScrapeConfiguration>(); + var instance = Activator.CreateInstance<TConfiguration>(); - ArgumentNullException.ThrowIfNull(scrapeConfiguration, nameof(scrapeConfiguration)); -
services.TryAddSingleton(scrapeConfiguration); + services.TryAddSingleton(instance); - return scrapeConfiguration; + return instance; } private static void AddJobWithTrigger( diff --git a/src/producer/GlobalUsings.cs b/src/producer/GlobalUsings.cs index 1a7749d7..0bf23aae 100644 --- a/src/producer/GlobalUsings.cs +++ b/src/producer/GlobalUsings.cs @@ -2,6 +2,7 @@ global using System.Text; global using System.Text.Json; +global using System.Text.Json.Serialization; global using System.Text.RegularExpressions; global using System.Xml.Linq; global using Dapper; diff --git a/src/producer/Models/GithubConfiguration.cs b/src/producer/Models/Configuration/GithubConfiguration.cs similarity index 80% rename from src/producer/Models/GithubConfiguration.cs rename to src/producer/Models/Configuration/GithubConfiguration.cs index 429b8d0b..0e257492 100644 --- a/src/producer/Models/GithubConfiguration.cs +++ b/src/producer/Models/Configuration/GithubConfiguration.cs @@ -1,4 +1,4 @@ -namespace Producer.Models; +namespace Producer.Models.Configuration; public class GithubConfiguration { diff --git a/src/producer/Models/Configuration/PostgresConfiguration.cs b/src/producer/Models/Configuration/PostgresConfiguration.cs new file mode 100644 index 00000000..fd3fb8d9 --- /dev/null +++ b/src/producer/Models/Configuration/PostgresConfiguration.cs @@ -0,0 +1,29 @@ +namespace Producer.Models.Configuration; + +public class PostgresConfiguration +{ + private const string Prefix = "POSTGRES"; + private const string HostVariable = "HOST"; + private const string UsernameVariable = "USER"; + private const string PasswordVariable = "PASSWORD"; + private const string DatabaseVariable = "DB"; + private const string PortVariable = "PORT"; + + private string Host { get; init; } = Environment.GetEnvironmentVariable($"{Prefix}_{HostVariable}") ?? + throw new InvalidOperationException($"Environment variable {Prefix}_{HostVariable} is not set"); + + private string Username { get; init; } = Environment.GetEnvironmentVariable($"{Prefix}_{UsernameVariable}") ?? + throw new InvalidOperationException($"Environment variable {Prefix}_{UsernameVariable} is not set"); + + private string Password { get; init; } = Environment.GetEnvironmentVariable($"{Prefix}_{PasswordVariable}") ?? + throw new InvalidOperationException($"Environment variable {Prefix}_{PasswordVariable} is not set"); + + private string Database { get; init; } = Environment.GetEnvironmentVariable($"{Prefix}_{DatabaseVariable}") ?? + throw new InvalidOperationException($"Environment variable {Prefix}_{DatabaseVariable} is not set"); + + private int Port { get; init; } = int.Parse( + Environment.GetEnvironmentVariable($"{Prefix}_{PortVariable}") ?? + throw new InvalidOperationException($"Environment variable {Prefix}_{PortVariable} is not set")); + + public string StorageConnectionString => $"Host={Host};Port={Port};Username={Username};Password={Password};Database={Database};"; +} \ No newline at end of file diff --git a/src/producer/Models/Configuration/RabbitMqConfiguration.cs b/src/producer/Models/Configuration/RabbitMqConfiguration.cs new file mode 100644 index 00000000..da2f17a2 --- /dev/null +++ b/src/producer/Models/Configuration/RabbitMqConfiguration.cs @@ -0,0 +1,39 @@ +namespace Producer.Models.Configuration; + +public class RabbitMqConfiguration +{ + public const string SectionName = "RabbitMqConfiguration"; + public const string Filename = "rabbitmq.json"; + + public string? Host { get; set; } + public string? Username { get; set; } + public string?
Password { get; set; } + public string? QueueName { get; set; } + public bool Durable { get; set; } + public int MaxQueueSize { get; set; } + public int MaxPublishBatchSize { get; set; } = 500; + public int PublishIntervalInSeconds { get; set; } = 1000 * 10; + + public void Validate() + { + if (MaxQueueSize == 0) + { + return; + } + + if (MaxQueueSize < 0) + { + throw new InvalidOperationException("MaxQueueSize cannot be less than 0 in RabbitMqConfiguration"); + } + + if (MaxPublishBatchSize < 0) + { + throw new InvalidOperationException("MaxPublishBatchSize cannot be less than 0 in RabbitMqConfiguration"); + } + + if (MaxPublishBatchSize > MaxQueueSize) + { + throw new InvalidOperationException("MaxPublishBatchSize cannot be greater than MaxQueueSize in RabbitMqConfiguration"); + } + } +} \ No newline at end of file diff --git a/src/producer/Models/ScrapeConfiguration.cs b/src/producer/Models/Configuration/ScrapeConfiguration.cs similarity index 69% rename from src/producer/Models/ScrapeConfiguration.cs rename to src/producer/Models/Configuration/ScrapeConfiguration.cs index 716e32a6..1073b0c7 100644 --- a/src/producer/Models/ScrapeConfiguration.cs +++ b/src/producer/Models/Configuration/ScrapeConfiguration.cs @@ -1,4 +1,4 @@ -namespace Producer.Models; +namespace Producer.Models.Configuration; public class ScrapeConfiguration { @@ -6,5 +6,4 @@ public class ScrapeConfiguration public const string Filename = "scrapers.json"; public List Scrapers { get; set; } = []; - public string StorageConnectionString { get; set; } = ""; } \ No newline at end of file diff --git a/src/producer/Models/RabbitMqConfiguration.cs b/src/producer/Models/RabbitMqConfiguration.cs deleted file mode 100644 index ccc02777..00000000 --- a/src/producer/Models/RabbitMqConfiguration.cs +++ /dev/null @@ -1,16 +0,0 @@ -namespace Producer.Models; - -public class RabbitMqConfiguration -{ - public const string SectionName = "RabbitMqConfiguration"; - public const string Filename = "rabbitmq.json"; - - public string? Host { get; set; } - public string? Username { get; set; } - public string? Password { get; set; } - public string? 
QueueName { get; set; } - public bool Durable { get; set; } - public int MaxQueueSize { get; set; } - public int MaxPublishBatchSize { get; set; } = 500; - public int PublishIntervalInSeconds { get; set; } = 1000 * 10; -} \ No newline at end of file diff --git a/src/producer/Producer.csproj b/src/producer/Producer.csproj index d54c9ecd..787cb5c0 100644 --- a/src/producer/Producer.csproj +++ b/src/producer/Producer.csproj @@ -40,6 +40,7 @@ Always + diff --git a/src/producer/Services/DapperDataStorage.cs b/src/producer/Services/DapperDataStorage.cs index 5fe352e7..0c6fa3ad 100644 --- a/src/producer/Services/DapperDataStorage.cs +++ b/src/producer/Services/DapperDataStorage.cs @@ -1,6 +1,8 @@ +using Producer.Models.Configuration; + namespace Producer.Services; -public class DapperDataStorage(ScrapeConfiguration configuration, RabbitMqConfiguration rabbitConfig, ILogger logger) : IDataStorage +public class DapperDataStorage(PostgresConfiguration configuration, RabbitMqConfiguration rabbitConfig, ILogger logger) : IDataStorage { private const string InsertTorrentSql = """ diff --git a/src/producer/Services/TorrentPublisher.cs b/src/producer/Services/TorrentPublisher.cs index ac397454..cb8434b9 100644 --- a/src/producer/Services/TorrentPublisher.cs +++ b/src/producer/Services/TorrentPublisher.cs @@ -1,4 +1,6 @@ -namespace Producer.Services; +using Producer.Models.Configuration; + +namespace Producer.Services; public class TorrentPublisher( ISendEndpointProvider sendEndpointProvider,
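
A note on the connection-string assembly in the two new `config.js` files above: plain string concatenation works for the defaults shipped in `.env.example`, but it breaks as soon as a password contains a reserved character such as `:`, `@`, or `/`. A minimal sketch of the same assembly (same field names as the diff; the `encodeURIComponent` calls are a suggested hardening, not something this changeset ships):

```js
// Sketch: builds the same URIs as the new config.js files, but
// percent-encodes credentials so reserved characters stay valid in the URL.
function buildMongoUri({ username, password, host, port, db }) {
  return `mongodb://${encodeURIComponent(username)}:${encodeURIComponent(password)}`
      + `@${host}:${port}/${db}?authSource=admin`;
}

function buildPostgresUri({ username, password, host, port, db }) {
  return `postgres://${encodeURIComponent(username)}:${encodeURIComponent(password)}`
      + `@${host}:${port}/${db}`;
}

// With the defaults from .env.example:
console.log(buildMongoUri({
  username: 'mongo', password: 'mongo',
  host: 'mongodb', port: '27017', db: 'knightcrawler'
}));
// -> mongodb://mongo:mongo@mongodb:27017/knightcrawler?authSource=admin
```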
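Similarly, note that `parseBool` (duplicated in the addon and consumer `config.js`) maps only the literal string `'true'` to `true` and returns `defaultValue` for everything else, so a value such as `NO_CACHE=false` can never override a `true` default. A hypothetical stricter variant for comparison (not part of this changeset):

```js
// Hypothetical variant: honours both 'true' and 'false' explicitly,
// falling back to the default only for missing or unrecognised input.
function parseBoolStrict(value, defaultValue) {
  if (typeof value !== 'string' && !(value instanceof String)) {
    return defaultValue;
  }
  switch (value.toLowerCase()) {
    case 'true': return true;
    case 'false': return false;
    default: return defaultValue;
  }
}
```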