Merge pull request #46 from purple-emily/change-to-single-env-file

Simplify the environment variables
Authored by iPromKnight on 2024-02-04 15:56:25 +00:00; committed by GitHub.
25 changed files with 588 additions and 457 deletions

.env.example (new file, 39 lines)

@@ -0,0 +1,39 @@
# General environment variables
TZ=London/Europe
# PostgreSQL
POSTGRES_HOST=postgres
POSTGRES_PORT=5432
POSTGRES_USER=postgres
POSTGRES_PASSWORD=postgres
POSTGRES_DB=knightcrawler
# MongoDB
MONGODB_HOST=mongodb
MONGODB_PORT=27017
MONGODB_DB=knightcrawler
MONGO_INITDB_ROOT_USERNAME=mongo
MONGO_INITDB_ROOT_PASSWORD=mongo
# Addon
DEBUG_MODE=false
# Consumer
RABBIT_URI=amqp://guest:guest@rabbitmq:5672/?heartbeat=30
QUEUE_NAME=ingested
JOB_CONCURRENCY=5
JOBS_ENABLED=true
MAX_SINGLE_TORRENT_CONNECTIONS=10
TORRENT_TIMEOUT=30000
UDP_TRACKERS_ENABLED=true
# Producer
RabbitMqConfiguration__Host=rabbitmq
RabbitMqConfiguration__QueueName=ingested
RabbitMqConfiguration__Username=guest
RabbitMqConfiguration__Password=guest
RabbitMqConfiguration__Durable=true
RabbitMqConfiguration__MaxQueueSize=0
RabbitMqConfiguration__MaxPublishBatchSize=500
RabbitMqConfiguration__PublishIntervalInSeconds=10
GithubSettings__PAT=
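The double-underscore names in the producer block follow the .NET configuration convention: the environment-variable provider maps `__` to the `:` section separator, so `RabbitMqConfiguration__Host` binds to the `Host` key of the `RabbitMqConfiguration` section. A minimal illustrative sketch of that mapping (not part of this commit; it assumes the variables are present in the process environment, which `env_file: .env` arranges for the containers):

```csharp
// Illustrative only: how .NET's environment-variable configuration provider
// exposes the RabbitMqConfiguration__* entries defined in .env above.
using Microsoft.Extensions.Configuration;

var configuration = new ConfigurationBuilder()
    .AddEnvironmentVariables() // "RabbitMqConfiguration__Host" -> "RabbitMqConfiguration:Host"
    .Build();

// Both lookups resolve to "rabbitmq" with the defaults above.
Console.WriteLine(configuration["RabbitMqConfiguration:Host"]);
Console.WriteLine(configuration.GetSection("RabbitMqConfiguration")["Host"]);
```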

.gitignore (1 line changed)

@@ -2,6 +2,7 @@
 .now
 .DS_Store
 .idea
+.env
 
 ## Ignore Visual Studio temporary files, build results, and
 ## files generated by popular Visual Studio add-ons.


@@ -13,6 +13,7 @@ A self-hosted Stremio addon for streaming torrents via a debrid service.
 - [Overview](#overview)
 - [Using](#using)
 - [Initial setup (optional)](#initial-setup-optional)
+- [Environment Setup](#environment-setup)
 - [Run the project](#run-the-project)
 - [Monitoring with Grafana and Prometheus (Optional)](#monitoring-with-grafana-and-prometheus-optional)
 - [Accessing RabbitMQ Management](#accessing-rabbitmq-management)
@@ -60,11 +61,22 @@ We can search DebridMediaManager hash lists which are hosted on GitHub. This all
 GithubSettings__PAT=<YOUR TOKEN HERE>
 ```
 
+### Environment Setup
+
+Before running the project, you need to set up the environment variables. Copy the `.env.example` file to `.env`:
+
+```sh
+cp .env.example .env
+```
+
+Then set any of the values you'd like to customize.
 
 ### Run the project
 
 Open a terminal in the directory and run the command:
 
-``` sh
+```sh
 docker compose up -d
 ```
@@ -108,6 +120,7 @@ Now, you can use these dashboards to monitor RabbitMQ and Postgres metrics.
 Note: If you encounter issues with missing or unavailable data in Grafana, please ensure on [Prometheus's target page](http://127.0.0.1:9090/targets) that the RabbitMQ target is up and running.
 
 ## Importing external dumps
 
 A brief record of the steps required to import external data, in this case the rarbg dump which can be found on RD:


@@ -35,10 +35,9 @@ x-apps: &knightcrawler-app
 services:
   postgres:
     image: postgres:latest
+    env_file:
+      - .env
     environment:
-      POSTGRES_USER: postgres
-      POSTGRES_PASSWORD: postgres
-      POSTGRES_DB: knightcrawler
       PGUSER: postgres # needed for healthcheck.
     ports:
       - "5432:5432"
@@ -51,9 +50,8 @@ services:
   mongodb:
     image: mongo:latest
-    environment:
-      MONGO_INITDB_ROOT_USERNAME: mongo
-      MONGO_INITDB_ROOT_PASSWORD: mongo
+    env_file:
+      - .env
     ports:
       - "27017:27017"
     volumes:
@@ -81,7 +79,7 @@ services:
       context: src/producer
       dockerfile: Dockerfile
     env_file:
-      - env/producer.env
+      - .env
     <<: *knightcrawler-app
     networks:
       - knightcrawler-network
@@ -91,7 +89,7 @@ services:
       context: src/node/consumer
       dockerfile: Dockerfile
     env_file:
-      - env/consumer.env
+      - .env
     deploy:
       replicas: 3
     <<: *knightcrawler-app
@@ -105,7 +103,7 @@ services:
     ports:
       - "7000:7000"
     env_file:
-      - env/addon.env
+      - .env
     <<: *knightcrawler-app
     networks:
       - knightcrawler-network

env/addon.env (deleted, 4 lines)

@@ -1,4 +0,0 @@
TZ=London/Europe
DATABASE_URI=postgres://postgres:postgres@postgres/knightcrawler
MONGODB_URI=mongodb://mongo:mongo@mongodb/knightcrawler?tls=false&authSource=admin
DEBUG_MODE=false

env/consumer.env (deleted, 11 lines)

@@ -1,11 +0,0 @@
TZ=London/Europe
MONGODB_URI=mongodb://mongo:mongo@mongodb/knightcrawler?tls=false&authSource=admin
DATABASE_URI=postgres://postgres:postgres@postgres/knightcrawler
RABBIT_URI=amqp://guest:guest@rabbitmq:5672/?heartbeat=30
QUEUE_NAME=ingested
JOB_CONCURRENCY=5
JOBS_ENABLED=true
ENABLE_SYNC=true
MAX_SINGLE_TORRENT_CONNECTIONS=10
TORRENT_TIMEOUT=30000
UDP_TRACKERS_ENABLED=true

env/producer.env (deleted, 10 lines)

@@ -1,10 +0,0 @@
ScrapeConfiguration__StorageConnectionString=host=postgres;username=postgres;password=postgres;database=knightcrawler;
RabbitMqConfiguration__Host=rabbitmq
RabbitMqConfiguration__QueueName=ingested
RabbitMqConfiguration__Username=guest
RabbitMqConfiguration__Password=guest
RabbitMqConfiguration__Durable=true
RabbitMqConfiguration__MaxQueueSize=0
RabbitMqConfiguration__MaxPublishBatchSize=500
RabbitMqConfiguration__PublishIntervalInSeconds=10
GithubSettings__PAT=


@@ -1,6 +1,7 @@
 import cacheManager from 'cache-manager';
 import mangodbStore from 'cache-manager-mongodb';
+import { cacheConfig } from './config.js';
 import { isStaticUrl } from '../moch/static.js';
 
 const GLOBAL_KEY_PREFIX = 'knightcrawler-addon';
 const STREAM_KEY_PREFIX = `${GLOBAL_KEY_PREFIX}|stream`;
@@ -14,90 +15,87 @@ const AVAILABILITY_EMPTY_TTL = 30 * 60; // 30 minutes
 const MESSAGE_VIDEO_URL_TTL = 60; // 1 minutes
 // When the streams are empty we want to cache it for less time in case of timeouts or failures
 
-const MONGO_URI = process.env.MONGODB_URI;
-const NO_CACHE = process.env.NO_CACHE || false;
-
 const memoryCache = initiateMemoryCache();
 const remoteCache = initiateRemoteCache();
 
 function initiateRemoteCache() {
-  if (NO_CACHE) {
+  if (cacheConfig.NO_CACHE) {
     return null;
-  } else if (MONGO_URI) {
+  } else if (cacheConfig.MONGO_URI) {
     return cacheManager.caching({
       store: mangodbStore,
-      uri: MONGO_URI,
+      uri: cacheConfig.MONGO_URI,
       options: {
         collection: 'knightcrawler_addon_collection',
         socketTimeoutMS: 120000,
         useNewUrlParser: true,
         useUnifiedTopology: false,
         ttl: STREAM_EMPTY_TTL
       },
       ttl: STREAM_EMPTY_TTL,
       ignoreCacheErrors: true
     });
   } else {
     return cacheManager.caching({
       store: 'memory',
       ttl: STREAM_EMPTY_TTL
     });
   }
 }
 
 function initiateMemoryCache() {
   return cacheManager.caching({
     store: 'memory',
     ttl: MESSAGE_VIDEO_URL_TTL,
     max: Infinity // infinite LRU cache size
   });
 }
 
 function cacheWrap(cache, key, method, options) {
-  if (NO_CACHE || !cache) {
+  if (cacheConfig.NO_CACHE || !cache) {
     return method();
   }
   return cache.wrap(key, method, options);
 }
export function cacheWrapStream(id, method) { export function cacheWrapStream(id, method) {
return cacheWrap(remoteCache, `${STREAM_KEY_PREFIX}:${id}`, method, { return cacheWrap(remoteCache, `${STREAM_KEY_PREFIX}:${id}`, method, {
ttl: (streams) => streams.length ? STREAM_TTL : STREAM_EMPTY_TTL ttl: (streams) => streams.length ? STREAM_TTL : STREAM_EMPTY_TTL
}); });
} }
export function cacheWrapResolvedUrl(id, method) { export function cacheWrapResolvedUrl(id, method) {
return cacheWrap(memoryCache, `${RESOLVED_URL_KEY_PREFIX}:${id}`, method, { return cacheWrap(memoryCache, `${RESOLVED_URL_KEY_PREFIX}:${id}`, method, {
ttl: (url) => isStaticUrl(url) ? MESSAGE_VIDEO_URL_TTL : STREAM_TTL ttl: (url) => isStaticUrl(url) ? MESSAGE_VIDEO_URL_TTL : STREAM_TTL
}); });
} }
export function cacheAvailabilityResults(results) { export function cacheAvailabilityResults(results) {
Object.keys(results) Object.keys(results)
.forEach(infoHash => { .forEach(infoHash => {
const key = `${AVAILABILITY_KEY_PREFIX}:${infoHash}`; const key = `${AVAILABILITY_KEY_PREFIX}:${infoHash}`;
const value = results[infoHash]; const value = results[infoHash];
const ttl = value?.length ? AVAILABILITY_TTL : AVAILABILITY_EMPTY_TTL; const ttl = value?.length ? AVAILABILITY_TTL : AVAILABILITY_EMPTY_TTL;
memoryCache.set(key, value, { ttl }) memoryCache.set(key, value, { ttl })
}); });
return results; return results;
} }
export function getCachedAvailabilityResults(infoHashes) { export function getCachedAvailabilityResults(infoHashes) {
const keys = infoHashes.map(infoHash => `${AVAILABILITY_KEY_PREFIX}:${infoHash}`) const keys = infoHashes.map(infoHash => `${AVAILABILITY_KEY_PREFIX}:${infoHash}`)
return new Promise(resolve => { return new Promise(resolve => {
memoryCache.mget(...keys, (error, result) => { memoryCache.mget(...keys, (error, result) => {
if (error) { if (error) {
console.log('Failed retrieve availability cache', error) console.log('Failed retrieve availability cache', error)
return resolve({}); return resolve({});
} }
const availabilityResults = {}; const availabilityResults = {};
infoHashes.forEach((infoHash, index) => { infoHashes.forEach((infoHash, index) => {
if (result[index]) { if (result[index]) {
availabilityResults[infoHash] = result[index]; availabilityResults[infoHash] = result[index];
} }
}); });
resolve(availabilityResults); resolve(availabilityResults);
}) })
}); });
} }


@@ -0,0 +1,38 @@
export const cacheConfig = {
MONGODB_HOST: process.env.MONGODB_HOST || 'mongodb',
MONGODB_PORT: process.env.MONGODB_PORT || '27017',
MONGODB_DB: process.env.MONGODB_DB || 'selfhostio',
MONGO_INITDB_ROOT_USERNAME: process.env.MONGO_INITDB_ROOT_USERNAME || 'mongo',
MONGO_INITDB_ROOT_PASSWORD: process.env.MONGO_INITDB_ROOT_PASSWORD || 'mongo',
COLLECTION_NAME: process.env.MONGODB_COLLECTION || 'selfhostio_consumer_collection',
NO_CACHE: parseBool(process.env.NO_CACHE, false),
}
// Combine the environment variables into a connection string
// The combined string will look something like:
// 'mongodb://mongo:mongo@localhost:27017/selfhostio?authSource=admin'
cacheConfig.MONGO_URI = 'mongodb://' + cacheConfig.MONGO_INITDB_ROOT_USERNAME + ':' + cacheConfig.MONGO_INITDB_ROOT_PASSWORD + '@' + cacheConfig.MONGODB_HOST + ':' + cacheConfig.MONGODB_PORT + '/' + cacheConfig.MONGODB_DB + '?authSource=admin';
export const databaseConfig = {
POSTGRES_HOST: process.env.POSTGRES_HOST || 'postgres',
POSTGRES_PORT: process.env.POSTGRES_PORT || '5432',
POSTGRES_DATABASE: process.env.POSTGRES_DATABASE || 'selfhostio',
POSTGRES_USERNAME: process.env.POSTGRES_USERNAME || 'postgres',
POSTGRES_PASSWORD: process.env.POSTGRES_PASSWORD || 'postgres',
}
// Combine the environment variables into a connection string
// The combined string will look something like:
// 'postgres://postgres:postgres@localhost:5432/selfhostio'
databaseConfig.POSTGRES_URI = 'postgres://' + databaseConfig.POSTGRES_USERNAME + ':' + databaseConfig.POSTGRES_PASSWORD + '@' + databaseConfig.POSTGRES_HOST + ':' + databaseConfig.POSTGRES_PORT + '/' + databaseConfig.POSTGRES_DATABASE;
function parseBool(boolString, defaultValue) {
const isString = typeof boolString === 'string' || boolString instanceof String;
if (!isString) {
return defaultValue;
}
return boolString.toLowerCase() === 'true' ? true : defaultValue;
}


@@ -1,64 +1,68 @@
 import { Sequelize } from 'sequelize';
+import { databaseConfig } from './config.js';
 
 const { Op } = Sequelize;
 
-const DATABASE_URI = process.env.DATABASE_URI || 'postgres://postgres:postgres@localhost:5432/postgres';
-
-const database = new Sequelize(DATABASE_URI, { logging: false });
+const database = new Sequelize(
+  databaseConfig.POSTGRES_URI,
+  {
+    logging: false
+  }
+);
const Torrent = database.define('torrent', const Torrent = database.define('torrent',
{ {
infoHash: { type: Sequelize.STRING(64), primaryKey: true }, infoHash: { type: Sequelize.STRING(64), primaryKey: true },
provider: { type: Sequelize.STRING(32), allowNull: false }, provider: { type: Sequelize.STRING(32), allowNull: false },
torrentId: { type: Sequelize.STRING(128) }, torrentId: { type: Sequelize.STRING(128) },
title: { type: Sequelize.STRING(256), allowNull: false }, title: { type: Sequelize.STRING(256), allowNull: false },
size: { type: Sequelize.BIGINT }, size: { type: Sequelize.BIGINT },
type: { type: Sequelize.STRING(16), allowNull: false }, type: { type: Sequelize.STRING(16), allowNull: false },
uploadDate: { type: Sequelize.DATE, allowNull: false }, uploadDate: { type: Sequelize.DATE, allowNull: false },
seeders: { type: Sequelize.SMALLINT }, seeders: { type: Sequelize.SMALLINT },
trackers: { type: Sequelize.STRING(4096) }, trackers: { type: Sequelize.STRING(4096) },
languages: { type: Sequelize.STRING(4096) }, languages: { type: Sequelize.STRING(4096) },
resolution: { type: Sequelize.STRING(16) } resolution: { type: Sequelize.STRING(16) }
} }
); );
const File = database.define('file', const File = database.define('file',
{ {
id: { type: Sequelize.BIGINT, autoIncrement: true, primaryKey: true }, id: { type: Sequelize.BIGINT, autoIncrement: true, primaryKey: true },
infoHash: { infoHash: {
type: Sequelize.STRING(64), type: Sequelize.STRING(64),
allowNull: false, allowNull: false,
references: { model: Torrent, key: 'infoHash' }, references: { model: Torrent, key: 'infoHash' },
onDelete: 'CASCADE' onDelete: 'CASCADE'
}, },
fileIndex: { type: Sequelize.INTEGER }, fileIndex: { type: Sequelize.INTEGER },
title: { type: Sequelize.STRING(256), allowNull: false }, title: { type: Sequelize.STRING(256), allowNull: false },
size: { type: Sequelize.BIGINT }, size: { type: Sequelize.BIGINT },
imdbId: { type: Sequelize.STRING(32) }, imdbId: { type: Sequelize.STRING(32) },
imdbSeason: { type: Sequelize.INTEGER }, imdbSeason: { type: Sequelize.INTEGER },
imdbEpisode: { type: Sequelize.INTEGER }, imdbEpisode: { type: Sequelize.INTEGER },
kitsuId: { type: Sequelize.INTEGER }, kitsuId: { type: Sequelize.INTEGER },
kitsuEpisode: { type: Sequelize.INTEGER } kitsuEpisode: { type: Sequelize.INTEGER }
}, },
); );
const Subtitle = database.define('subtitle', const Subtitle = database.define('subtitle',
{ {
infoHash: { infoHash: {
type: Sequelize.STRING(64), type: Sequelize.STRING(64),
allowNull: false, allowNull: false,
references: { model: Torrent, key: 'infoHash' }, references: { model: Torrent, key: 'infoHash' },
onDelete: 'CASCADE' onDelete: 'CASCADE'
}, },
fileIndex: { type: Sequelize.INTEGER, allowNull: false }, fileIndex: { type: Sequelize.INTEGER, allowNull: false },
fileId: { fileId: {
type: Sequelize.BIGINT, type: Sequelize.BIGINT,
allowNull: true, allowNull: true,
references: { model: File, key: 'id' }, references: { model: File, key: 'id' },
onDelete: 'SET NULL' onDelete: 'SET NULL'
}, },
title: { type: Sequelize.STRING(512), allowNull: false }, title: { type: Sequelize.STRING(512), allowNull: false },
size: { type: Sequelize.BIGINT, allowNull: false }, size: { type: Sequelize.BIGINT, allowNull: false },
}, },
{ timestamps: false } { timestamps: false }
); );
@@ -69,66 +73,66 @@ File.hasMany(Subtitle, { foreignKey: 'fileId', constraints: false });
Subtitle.belongsTo(File, { foreignKey: 'fileId', constraints: false }); Subtitle.belongsTo(File, { foreignKey: 'fileId', constraints: false });
export function getTorrent(infoHash) { export function getTorrent(infoHash) {
return Torrent.findOne({ where: { infoHash: infoHash } }); return Torrent.findOne({ where: { infoHash: infoHash } });
} }
export function getFiles(infoHashes) { export function getFiles(infoHashes) {
return File.findAll({ where: { infoHash: { [Op.in]: infoHashes} } }); return File.findAll({ where: { infoHash: { [Op.in]: infoHashes } } });
} }
export function getImdbIdMovieEntries(imdbId) { export function getImdbIdMovieEntries(imdbId) {
return File.findAll({ return File.findAll({
where: { where: {
imdbId: { [Op.eq]: imdbId } imdbId: { [Op.eq]: imdbId }
}, },
include: [Torrent], include: [Torrent],
limit: 500, limit: 500,
order: [ order: [
[Torrent, 'size', 'DESC'] [Torrent, 'size', 'DESC']
] ]
}); });
} }
export function getImdbIdSeriesEntries(imdbId, season, episode) { export function getImdbIdSeriesEntries(imdbId, season, episode) {
return File.findAll({ return File.findAll({
where: { where: {
imdbId: { [Op.eq]: imdbId }, imdbId: { [Op.eq]: imdbId },
imdbSeason: { [Op.eq]: season }, imdbSeason: { [Op.eq]: season },
imdbEpisode: { [Op.eq]: episode } imdbEpisode: { [Op.eq]: episode }
}, },
include: [Torrent], include: [Torrent],
limit: 500, limit: 500,
order: [ order: [
[Torrent, 'size', 'DESC'] [Torrent, 'size', 'DESC']
] ]
}); });
} }
export function getKitsuIdMovieEntries(kitsuId) { export function getKitsuIdMovieEntries(kitsuId) {
return File.findAll({ return File.findAll({
where: { where: {
kitsuId: { [Op.eq]: kitsuId } kitsuId: { [Op.eq]: kitsuId }
}, },
include: [Torrent], include: [Torrent],
limit: 500, limit: 500,
order: [ order: [
[Torrent, 'size', 'DESC'] [Torrent, 'size', 'DESC']
] ]
}); });
} }
export function getKitsuIdSeriesEntries(kitsuId, episode) { export function getKitsuIdSeriesEntries(kitsuId, episode) {
return File.findAll({ return File.findAll({
where: { where: {
kitsuId: { [Op.eq]: kitsuId }, kitsuId: { [Op.eq]: kitsuId },
kitsuEpisode: { [Op.eq]: episode } kitsuEpisode: { [Op.eq]: episode }
}, },
include: [Torrent], include: [Torrent],
limit: 500, limit: 500,
order: [ order: [
[Torrent, 'size', 'DESC'] [Torrent, 'size', 'DESC']
] ]
}); });
} }


@@ -4,16 +4,34 @@
 }
 
 export const cacheConfig = {
-    MONGO_URI: process.env.MONGODB_URI || 'mongodb://mongo:mongo@localhost:27017/knightcrawler?authSource=admin',
+    MONGODB_HOST: process.env.MONGODB_HOST || 'mongodb',
+    MONGODB_PORT: process.env.MONGODB_PORT || '27017',
+    MONGODB_DB: process.env.MONGODB_DB || 'knightcrawler',
+    MONGO_INITDB_ROOT_USERNAME: process.env.MONGO_INITDB_ROOT_USERNAME || 'mongo',
+    MONGO_INITDB_ROOT_PASSWORD: process.env.MONGO_INITDB_ROOT_PASSWORD || 'mongo',
     NO_CACHE: parseBool(process.env.NO_CACHE, false),
     COLLECTION_NAME: process.env.MONGODB_COLLECTION || 'knightcrawler_consumer_collection'
 }
+
+// Combine the environment variables into a connection string
+// The combined string will look something like:
+// 'mongodb://mongo:mongo@localhost:27017/knightcrawler?authSource=admin'
+cacheConfig.MONGO_URI = 'mongodb://' + cacheConfig.MONGO_INITDB_ROOT_USERNAME + ':' + cacheConfig.MONGO_INITDB_ROOT_PASSWORD + '@' + cacheConfig.MONGODB_HOST + ':' + cacheConfig.MONGODB_PORT + '/' + cacheConfig.MONGODB_DB + '?authSource=admin';
 
 export const databaseConfig = {
-    DATABASE_URI: process.env.DATABASE_URI || 'postgres://postgres:postgres@localhost:5432/knightcrawler',
-    ENABLE_SYNC: parseBool(process.env.ENABLE_SYNC, true)
+    POSTGRES_HOST: process.env.POSTGRES_HOST || 'postgres',
+    POSTGRES_PORT: process.env.POSTGRES_PORT || '5432',
+    POSTGRES_DATABASE: process.env.POSTGRES_DATABASE || 'knightcrawler',
+    POSTGRES_USERNAME: process.env.POSTGRES_USERNAME || 'postgres',
+    POSTGRES_PASSWORD: process.env.POSTGRES_PASSWORD || 'postgres',
+    ENABLE_SYNC: true
 }
+
+// Combine the environment variables into a connection string
+// The combined string will look something like:
+// 'postgres://postgres:postgres@localhost:5432/knightcrawler'
+databaseConfig.POSTGRES_URI = 'postgres://' + databaseConfig.POSTGRES_USERNAME + ':' + databaseConfig.POSTGRES_PASSWORD + '@' + databaseConfig.POSTGRES_HOST + ':' + databaseConfig.POSTGRES_PORT + '/' + databaseConfig.POSTGRES_DATABASE;
 
 export const jobConfig = {
     JOB_CONCURRENCY: parseInt(process.env.JOB_CONCURRENCY || 1),
     JOBS_ENABLED: parseBool(process.env.JOBS_ENABLED || true)
@@ -36,10 +54,10 @@ export const torrentConfig = {
 function parseBool(boolString, defaultValue) {
     const isString = typeof boolString === 'string' || boolString instanceof String;
     if (!isString) {
         return defaultValue;
     }
     return boolString.toLowerCase() === 'true' ? true : defaultValue;
 }


@@ -5,16 +5,16 @@ import {logger} from "./logger.js";
 import * as Promises from './promises.js';
 
 const database = new Sequelize(
-    databaseConfig.DATABASE_URI,
+    databaseConfig.POSTGRES_URI,
     {
         logging: false
     }
 );
 
 const Provider = database.define('provider', {
     name: { type: DataTypes.STRING(32), primaryKey: true },
     lastScraped: { type: DataTypes.DATE },
     lastScrapedId: { type: DataTypes.STRING(128) }
 });
const IngestedTorrent = database.define('ingested_torrent', { const IngestedTorrent = database.define('ingested_torrent', {
@@ -30,7 +30,8 @@ const IngestedTorrent = database.define('ingested_torrent', {
     processed: {
         type: DataTypes.BOOLEAN,
         defaultValue: false
-    }},
+    }
+},
 {
     indexes: [
         {
@@ -42,9 +43,9 @@ const IngestedTorrent = database.define('ingested_torrent', {
 /* eslint-disable no-unused-vars */
 const IngestedPage = database.define('ingested_page', {
     id: { type: DataTypes.BIGINT, autoIncrement: true, primaryKey: true },
     url: { type: DataTypes.STRING, allowNull: false },
 },
 {
     indexes: [
         {
@@ -57,122 +58,122 @@ const IngestedPage = database.define('ingested_page', {
const Torrent = database.define('torrent', const Torrent = database.define('torrent',
{ {
infoHash: { type: DataTypes.STRING(64), primaryKey: true }, infoHash: { type: DataTypes.STRING(64), primaryKey: true },
provider: { type: DataTypes.STRING(32), allowNull: false }, provider: { type: DataTypes.STRING(32), allowNull: false },
torrentId: { type: DataTypes.STRING(512) }, torrentId: { type: DataTypes.STRING(512) },
title: { type: DataTypes.STRING(512), allowNull: false }, title: { type: DataTypes.STRING(512), allowNull: false },
size: { type: DataTypes.BIGINT }, size: { type: DataTypes.BIGINT },
type: { type: DataTypes.STRING(16), allowNull: false }, type: { type: DataTypes.STRING(16), allowNull: false },
uploadDate: { type: DataTypes.DATE, allowNull: false }, uploadDate: { type: DataTypes.DATE, allowNull: false },
seeders: { type: DataTypes.SMALLINT }, seeders: { type: DataTypes.SMALLINT },
trackers: { type: DataTypes.STRING(8000) }, trackers: { type: DataTypes.STRING(8000) },
languages: { type: DataTypes.STRING(4096) }, languages: { type: DataTypes.STRING(4096) },
resolution: { type: DataTypes.STRING(16) }, resolution: { type: DataTypes.STRING(16) },
reviewed: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false }, reviewed: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false },
opened: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false } opened: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false }
} }
); );
const File = database.define('file', const File = database.define('file',
{ {
id: { type: DataTypes.BIGINT, autoIncrement: true, primaryKey: true }, id: { type: DataTypes.BIGINT, autoIncrement: true, primaryKey: true },
infoHash: { infoHash: {
type: DataTypes.STRING(64), type: DataTypes.STRING(64),
allowNull: false, allowNull: false,
references: { model: Torrent, key: 'infoHash' }, references: { model: Torrent, key: 'infoHash' },
onDelete: 'CASCADE' onDelete: 'CASCADE'
}, },
fileIndex: { type: DataTypes.INTEGER }, fileIndex: { type: DataTypes.INTEGER },
title: { type: DataTypes.STRING(512), allowNull: false }, title: { type: DataTypes.STRING(512), allowNull: false },
size: { type: DataTypes.BIGINT }, size: { type: DataTypes.BIGINT },
imdbId: { type: DataTypes.STRING(32) }, imdbId: { type: DataTypes.STRING(32) },
imdbSeason: { type: DataTypes.INTEGER }, imdbSeason: { type: DataTypes.INTEGER },
imdbEpisode: { type: DataTypes.INTEGER }, imdbEpisode: { type: DataTypes.INTEGER },
kitsuId: { type: DataTypes.INTEGER }, kitsuId: { type: DataTypes.INTEGER },
kitsuEpisode: { type: DataTypes.INTEGER } kitsuEpisode: { type: DataTypes.INTEGER }
}, },
{ {
indexes: [ indexes: [
{ {
unique: true, unique: true,
name: 'files_unique_file_constraint', name: 'files_unique_file_constraint',
fields: [ fields: [
col('infoHash'), col('infoHash'),
fn('COALESCE', (col('fileIndex')), -1), fn('COALESCE', (col('fileIndex')), -1),
fn('COALESCE', (col('imdbId')), 'null'), fn('COALESCE', (col('imdbId')), 'null'),
fn('COALESCE', (col('imdbSeason')), -1), fn('COALESCE', (col('imdbSeason')), -1),
fn('COALESCE', (col('imdbEpisode')), -1), fn('COALESCE', (col('imdbEpisode')), -1),
fn('COALESCE', (col('kitsuId')), -1), fn('COALESCE', (col('kitsuId')), -1),
fn('COALESCE', (col('kitsuEpisode')), -1) fn('COALESCE', (col('kitsuEpisode')), -1)
] ]
}, },
{ unique: false, fields: ['imdbId', 'imdbSeason', 'imdbEpisode'] }, { unique: false, fields: ['imdbId', 'imdbSeason', 'imdbEpisode'] },
{ unique: false, fields: ['kitsuId', 'kitsuEpisode'] } { unique: false, fields: ['kitsuId', 'kitsuEpisode'] }
] ]
} }
); );
const Subtitle = database.define('subtitle', const Subtitle = database.define('subtitle',
{ {
infoHash: { infoHash: {
type: DataTypes.STRING(64), type: DataTypes.STRING(64),
allowNull: false, allowNull: false,
references: { model: Torrent, key: 'infoHash' }, references: { model: Torrent, key: 'infoHash' },
onDelete: 'CASCADE' onDelete: 'CASCADE'
}, },
fileIndex: { fileIndex: {
type: DataTypes.INTEGER, type: DataTypes.INTEGER,
allowNull: false allowNull: false
}, },
fileId: { fileId: {
type: DataTypes.BIGINT, type: DataTypes.BIGINT,
allowNull: true, allowNull: true,
references: { model: File, key: 'id' }, references: { model: File, key: 'id' },
onDelete: 'SET NULL' onDelete: 'SET NULL'
}, },
title: { type: DataTypes.STRING(512), allowNull: false }, title: { type: DataTypes.STRING(512), allowNull: false },
}, },
{ {
timestamps: false, timestamps: false,
indexes: [ indexes: [
{ {
unique: true, unique: true,
name: 'subtitles_unique_subtitle_constraint', name: 'subtitles_unique_subtitle_constraint',
fields: [ fields: [
col('infoHash'), col('infoHash'),
col('fileIndex'), col('fileIndex'),
fn('COALESCE', (col('fileId')), -1) fn('COALESCE', (col('fileId')), -1)
] ]
}, },
{ unique: false, fields: ['fileId'] } { unique: false, fields: ['fileId'] }
] ]
} }
); );
const Content = database.define('content', const Content = database.define('content',
{ {
infoHash: { infoHash: {
type: DataTypes.STRING(64), type: DataTypes.STRING(64),
primaryKey: true, primaryKey: true,
allowNull: false, allowNull: false,
references: { model: Torrent, key: 'infoHash' }, references: { model: Torrent, key: 'infoHash' },
onDelete: 'CASCADE' onDelete: 'CASCADE'
}, },
fileIndex: { fileIndex: {
type: DataTypes.INTEGER, type: DataTypes.INTEGER,
primaryKey: true, primaryKey: true,
allowNull: false allowNull: false
}, },
path: { type: DataTypes.STRING(512), allowNull: false }, path: { type: DataTypes.STRING(512), allowNull: false },
size: { type: DataTypes.BIGINT }, size: { type: DataTypes.BIGINT },
}, },
{ {
timestamps: false, timestamps: false,
} }
); );
const SkipTorrent = database.define('skip_torrent', { const SkipTorrent = database.define('skip_torrent', {
infoHash: { type: DataTypes.STRING(64), primaryKey: true }, infoHash: { type: DataTypes.STRING(64), primaryKey: true },
}); });
Torrent.hasMany(File, { foreignKey: 'infoHash', constraints: false }); Torrent.hasMany(File, { foreignKey: 'infoHash', constraints: false });
@@ -183,39 +184,39 @@ File.hasMany(Subtitle, { foreignKey: 'fileId', constraints: false });
 Subtitle.belongsTo(File, { foreignKey: 'fileId', constraints: false });
 
 export function connect() {
     if (databaseConfig.ENABLE_SYNC) {
         return database.sync({ alter: true })
             .catch(error => {
-                logger.error('Failed syncing database: ', error);
+                console.error('Failed syncing database: ', error);
                 throw error;
             });
     }
     return Promise.resolve();
 }
export function getProvider(provider) { export function getProvider(provider) {
return Provider.findOrCreate({ where: { name: { [Op.eq]: provider.name } }, defaults: provider }) return Provider.findOrCreate({ where: { name: { [Op.eq]: provider.name } }, defaults: provider })
.then((result) => result[0]) .then((result) => result[0])
.catch(() => provider); .catch(() => provider);
} }
export function getTorrent(torrent) { export function getTorrent(torrent) {
const where = torrent.infoHash const where = torrent.infoHash
? { infoHash: torrent.infoHash } ? { infoHash: torrent.infoHash }
: { provider: torrent.provider, torrentId: torrent.torrentId } : { provider: torrent.provider, torrentId: torrent.torrentId }
return Torrent.findOne({ where: where }); return Torrent.findOne({ where: where });
} }
export function getTorrentsBasedOnTitle(titleQuery, type) { export function getTorrentsBasedOnTitle(titleQuery, type) {
return getTorrentsBasedOnQuery({ title: { [Op.regexp]: `${titleQuery}` }, type: type }); return getTorrentsBasedOnQuery({ title: { [Op.regexp]: `${titleQuery}` }, type: type });
} }
export function getTorrentsBasedOnQuery(where) { export function getTorrentsBasedOnQuery(where) {
return Torrent.findAll({ where: where }); return Torrent.findAll({ where: where });
} }
export function getFilesBasedOnQuery(where) { export function getFilesBasedOnQuery(where) {
return File.findAll({ where: where }); return File.findAll({ where: where });
} }
export function getUnprocessedIngestedTorrents() { export function getUnprocessedIngestedTorrents() {
@@ -226,7 +227,7 @@ export function getUnprocessedIngestedTorrents() {
[Op.or]: ['tv', 'movies'] [Op.or]: ['tv', 'movies']
} }
}, },
}); });
} }
@@ -239,142 +240,142 @@ export function setIngestedTorrentsProcessed(ingestedTorrents) {
} }
export function getTorrentsWithoutSize() { export function getTorrentsWithoutSize() {
return Torrent.findAll({ return Torrent.findAll({
where: literal( where: literal(
'exists (select 1 from files where files."infoHash" = torrent."infoHash" and files.size = 300000000)'), 'exists (select 1 from files where files."infoHash" = torrent."infoHash" and files.size = 300000000)'),
order: [ order: [
['seeders', 'DESC'] ['seeders', 'DESC']
] ]
}); });
} }
export function getUpdateSeedersTorrents(limit = 50) { export function getUpdateSeedersTorrents(limit = 50) {
const until = moment().subtract(7, 'days').format('YYYY-MM-DD'); const until = moment().subtract(7, 'days').format('YYYY-MM-DD');
return Torrent.findAll({ return Torrent.findAll({
where: literal(`torrent."updatedAt" < '${until}'`), where: literal(`torrent."updatedAt" < '${until}'`),
limit: limit, limit: limit,
order: [ order: [
['seeders', 'DESC'], ['seeders', 'DESC'],
['updatedAt', 'ASC'] ['updatedAt', 'ASC']
] ]
}); });
} }
export function getUpdateSeedersNewTorrents(limit = 50) { export function getUpdateSeedersNewTorrents(limit = 50) {
const lastUpdate = moment().subtract(12, 'hours').format('YYYY-MM-DD'); const lastUpdate = moment().subtract(12, 'hours').format('YYYY-MM-DD');
const createdAfter = moment().subtract(4, 'days').format('YYYY-MM-DD'); const createdAfter = moment().subtract(4, 'days').format('YYYY-MM-DD');
return Torrent.findAll({ return Torrent.findAll({
where: literal(`torrent."updatedAt" < '${lastUpdate}' AND torrent."createdAt" > '${createdAfter}'`), where: literal(`torrent."updatedAt" < '${lastUpdate}' AND torrent."createdAt" > '${createdAfter}'`),
limit: limit, limit: limit,
order: [ order: [
['seeders', 'ASC'], ['seeders', 'ASC'],
['updatedAt', 'ASC'] ['updatedAt', 'ASC']
] ]
}); });
} }
export function getNoContentsTorrents() { export function getNoContentsTorrents() {
return Torrent.findAll({ return Torrent.findAll({
where: { opened: false, seeders: { [Op.gte]: 1 } }, where: { opened: false, seeders: { [Op.gte]: 1 } },
limit: 500, limit: 500,
order: [[fn('RANDOM')]] order: [[fn('RANDOM')]]
}); });
} }
export function createTorrent(torrent) { export function createTorrent(torrent) {
return Torrent.upsert(torrent) return Torrent.upsert(torrent)
.then(() => createContents(torrent.infoHash, torrent.contents)) .then(() => createContents(torrent.infoHash, torrent.contents))
.then(() => createSubtitles(torrent.infoHash, torrent.subtitles)); .then(() => createSubtitles(torrent.infoHash, torrent.subtitles));
} }
export function setTorrentSeeders(torrent, seeders) { export function setTorrentSeeders(torrent, seeders) {
const where = torrent.infoHash const where = torrent.infoHash
? { infoHash: torrent.infoHash } ? { infoHash: torrent.infoHash }
: { provider: torrent.provider, torrentId: torrent.torrentId } : { provider: torrent.provider, torrentId: torrent.torrentId }
return Torrent.update( return Torrent.update(
{ seeders: seeders }, { seeders: seeders },
{ where: where } { where: where }
); );
} }
export function deleteTorrent(torrent) { export function deleteTorrent(torrent) {
return Torrent.destroy({ where: { infoHash: torrent.infoHash } }) return Torrent.destroy({ where: { infoHash: torrent.infoHash } })
} }
export function createFile(file) { export function createFile(file) {
if (file.id) { if (file.id) {
return (file.dataValues ? file.save() : File.upsert(file)) return (file.dataValues ? file.save() : File.upsert(file))
.then(() => upsertSubtitles(file, file.subtitles)); .then(() => upsertSubtitles(file, file.subtitles));
} }
if (file.subtitles && file.subtitles.length) { if (file.subtitles && file.subtitles.length) {
file.subtitles = file.subtitles.map(subtitle => ({ infoHash: file.infoHash, title: subtitle.path, ...subtitle })); file.subtitles = file.subtitles.map(subtitle => ({ infoHash: file.infoHash, title: subtitle.path, ...subtitle }));
} }
return File.create(file, { include: [Subtitle], ignoreDuplicates: true }); return File.create(file, { include: [Subtitle], ignoreDuplicates: true });
} }
export function getFiles(torrent) { export function getFiles(torrent) {
return File.findAll({ where: { infoHash: torrent.infoHash } }); return File.findAll({ where: { infoHash: torrent.infoHash } });
} }
export function getFilesBasedOnTitle(titleQuery) { export function getFilesBasedOnTitle(titleQuery) {
return File.findAll({ where: { title: { [Op.regexp]: `${titleQuery}` } } }); return File.findAll({ where: { title: { [Op.regexp]: `${titleQuery}` } } });
} }
export function deleteFile(file) { export function deleteFile(file) {
return File.destroy({ where: { id: file.id } }) return File.destroy({ where: { id: file.id } })
} }
export function createSubtitles(infoHash, subtitles) { export function createSubtitles(infoHash, subtitles) {
if (subtitles && subtitles.length) { if (subtitles && subtitles.length) {
return Subtitle.bulkCreate(subtitles.map(subtitle => ({ infoHash, title: subtitle.path, ...subtitle }))); return Subtitle.bulkCreate(subtitles.map(subtitle => ({ infoHash, title: subtitle.path, ...subtitle })));
} }
return Promise.resolve(); return Promise.resolve();
} }
export function upsertSubtitles(file, subtitles) { export function upsertSubtitles(file, subtitles) {
if (file.id && subtitles && subtitles.length) { if (file.id && subtitles && subtitles.length) {
return Promises.sequence(subtitles return Promises.sequence(subtitles
.map(subtitle => { .map(subtitle => {
subtitle.fileId = file.id; subtitle.fileId = file.id;
subtitle.infoHash = subtitle.infoHash || file.infoHash; subtitle.infoHash = subtitle.infoHash || file.infoHash;
subtitle.title = subtitle.title || subtitle.path; subtitle.title = subtitle.title || subtitle.path;
return subtitle; return subtitle;
}) })
.map(subtitle => () => subtitle.dataValues ? subtitle.save() : Subtitle.create(subtitle))); .map(subtitle => () => subtitle.dataValues ? subtitle.save() : Subtitle.create(subtitle)));
} }
return Promise.resolve(); return Promise.resolve();
} }
export function getSubtitles(torrent) { export function getSubtitles(torrent) {
return Subtitle.findAll({ where: { infoHash: torrent.infoHash } }); return Subtitle.findAll({ where: { infoHash: torrent.infoHash } });
} }
export function getUnassignedSubtitles() { export function getUnassignedSubtitles() {
return Subtitle.findAll({ where: { fileId: null } }); return Subtitle.findAll({ where: { fileId: null } });
} }
export function createContents(infoHash, contents) { export function createContents(infoHash, contents) {
if (contents && contents.length) { if (contents && contents.length) {
return Content.bulkCreate(contents.map(content => ({ infoHash, ...content })), { ignoreDuplicates: true }) return Content.bulkCreate(contents.map(content => ({ infoHash, ...content })), { ignoreDuplicates: true })
.then(() => Torrent.update({ opened: true }, { where: { infoHash: infoHash }, silent: true })); .then(() => Torrent.update({ opened: true }, { where: { infoHash: infoHash }, silent: true }));
} }
return Promise.resolve(); return Promise.resolve();
} }
export function getContents(torrent) { export function getContents(torrent) {
return Content.findAll({ where: { infoHash: torrent.infoHash } }); return Content.findAll({ where: { infoHash: torrent.infoHash } });
} }
export function getSkipTorrent(torrent) { export function getSkipTorrent(torrent) {
return SkipTorrent.findByPk(torrent.infoHash) return SkipTorrent.findByPk(torrent.infoHash)
.then((result) => { .then((result) => {
if (!result) { if (!result) {
throw new Error(`torrent not found: ${torrent.infoHash}`); throw new Error(`torrent not found: ${torrent.infoHash}`);
} }
return result.dataValues; return result.dataValues;
}) })
} }
export function createSkipTorrent(torrent) { export function createSkipTorrent(torrent) {
return SkipTorrent.upsert({ infoHash: torrent.infoHash }); return SkipTorrent.upsert({ infoHash: torrent.infoHash });
} }


@@ -1,6 +1,5 @@
 {
   "ScrapeConfiguration": {
-    "StorageConnectionString": "",
     "Scrapers": [
       {
         "Name": "SyncEzTvJob",


@@ -1,3 +1,5 @@
+using Producer.Models.Configuration;
+
 namespace Producer.Crawlers.Sites;
 
 public partial class DebridMediaManagerCrawler(


@@ -1,3 +1,5 @@
+using Producer.Models.Configuration;
+
 namespace Producer.Extensions;
 
 public static class ConfigurationExtensions


@@ -1,3 +1,5 @@
+using Producer.Models.Configuration;
+
 namespace Producer.Extensions;
 
 public static class ServiceCollectionExtensions
@@ -20,6 +22,7 @@ public static class ServiceCollectionExtensions
     internal static IServiceCollection AddDataStorage(this IServiceCollection services)
     {
+        services.LoadConfigurationFromEnv<PostgresConfiguration>();
         services.AddTransient<IDataStorage, DapperDataStorage>();
         services.AddTransient<IMessagePublisher, TorrentPublisher>();
 
         return services;
@@ -36,9 +39,9 @@ public static class ServiceCollectionExtensions
         services.AddMassTransit(busConfigurator =>
         {
             busConfigurator.SetKebabCaseEndpointNameFormatter();
-            busConfigurator.UsingRabbitMq((context, busFactoryConfigurator) =>
+            busConfigurator.UsingRabbitMq((_, busFactoryConfigurator) =>
             {
-                busFactoryConfigurator.Host(rabbitConfig!.Host, hostConfigurator =>
+                busFactoryConfigurator.Host(rabbitConfig.Host, hostConfigurator =>
                 {
                     hostConfigurator.Username(rabbitConfig.Username);
                     hostConfigurator.Password(rabbitConfig.Password);
@@ -51,9 +54,9 @@ public static class ServiceCollectionExtensions
     internal static IServiceCollection AddQuartz(this IServiceCollection services, IConfiguration configuration)
     {
-        var scrapeConfiguration = LoadScrapeConfiguration(services, configuration);
-        var githubConfiguration = LoadGithubConfiguration(services, configuration);
-        var rabbitConfig = LoadRabbitMQConfiguration(services, configuration);
+        var scrapeConfiguration = services.LoadConfigurationFromConfig<ScrapeConfiguration>(configuration, ScrapeConfiguration.SectionName);
+        var githubConfiguration = services.LoadConfigurationFromConfig<GithubConfiguration>(configuration, GithubConfiguration.SectionName);
+        var rabbitConfig = services.LoadConfigurationFromConfig<RabbitMqConfiguration>(configuration, RabbitMqConfiguration.SectionName);
 
         services
             .AddTransient<SyncEzTvJob>()
@@ -92,46 +95,29 @@ public static class ServiceCollectionExtensions
         return services;
     }
 
-    private static GithubConfiguration LoadGithubConfiguration(IServiceCollection services, IConfiguration configuration)
+    private static TConfiguration LoadConfigurationFromConfig<TConfiguration>(this IServiceCollection services, IConfiguration configuration, string sectionName)
+        where TConfiguration : class
     {
-        var githubConfiguration = configuration.GetSection(GithubConfiguration.SectionName).Get<GithubConfiguration>();
-        ArgumentNullException.ThrowIfNull(githubConfiguration, nameof(githubConfiguration));
-        services.TryAddSingleton(githubConfiguration);
-        return githubConfiguration;
+        var instance = configuration.GetSection(sectionName).Get<TConfiguration>();
+        ArgumentNullException.ThrowIfNull(instance, nameof(instance));
+
+        services.TryAddSingleton(instance);
+        return instance;
     }
 
-    private static RabbitMqConfiguration LoadRabbitMQConfiguration(IServiceCollection services, IConfiguration configuration)
+    private static TConfiguration LoadConfigurationFromEnv<TConfiguration>(this IServiceCollection services)
+        where TConfiguration : class
     {
-        var rabbitConfiguration = configuration.GetSection(RabbitMqConfiguration.SectionName).Get<RabbitMqConfiguration>();
-        ArgumentNullException.ThrowIfNull(rabbitConfiguration, nameof(rabbitConfiguration));
-
-        if (rabbitConfiguration.MaxQueueSize > 0)
-        {
-            if (rabbitConfiguration.MaxPublishBatchSize > rabbitConfiguration.MaxQueueSize)
-            {
-                throw new InvalidOperationException("MaxPublishBatchSize cannot be greater than MaxQueueSize in RabbitMqConfiguration");
-            }
-        }
-
-        services.TryAddSingleton(rabbitConfiguration);
-        return rabbitConfiguration;
-    }
-
-    private static ScrapeConfiguration LoadScrapeConfiguration(IServiceCollection services, IConfiguration configuration)
-    {
-        var scrapeConfiguration = configuration.GetSection(ScrapeConfiguration.SectionName).Get<ScrapeConfiguration>();
-        ArgumentNullException.ThrowIfNull(scrapeConfiguration, nameof(scrapeConfiguration));
-        services.TryAddSingleton(scrapeConfiguration);
-        return scrapeConfiguration;
+        var instance = Activator.CreateInstance<TConfiguration>();
+        ArgumentNullException.ThrowIfNull(instance, nameof(instance));
+        services.TryAddSingleton(instance);
+        return instance;
     }
 
     private static void AddJobWithTrigger<TJobType>(


@@ -2,6 +2,7 @@
 global using System.Text;
 global using System.Text.Json;
+global using System.Text.Json.Serialization;
 global using System.Text.RegularExpressions;
 global using System.Xml.Linq;
 global using Dapper;


@@ -1,4 +1,4 @@
-namespace Producer.Models;
+namespace Producer.Models.Configuration;
 
 public class GithubConfiguration
 {


@@ -0,0 +1,29 @@
namespace Producer.Models.Configuration;
public class PostgresConfiguration
{
private const string Prefix = "POSTGRES";
private const string HostVariable = "HOST";
private const string UsernameVariable = "USER";
private const string PasswordVariable = "PASSWORD";
private const string DatabaseVariable = "DB";
private const string PortVariable = "PORT";
private string Host { get; init; } = Environment.GetEnvironmentVariable($"{Prefix}_{HostVariable}") ??
throw new InvalidOperationException($"Environment variable {Prefix}_{HostVariable} is not set");
private string Username { get; init; } = Environment.GetEnvironmentVariable($"{Prefix}_{UsernameVariable}") ??
throw new InvalidOperationException($"Environment variable {Prefix}_{UsernameVariable} is not set");
private string Password { get; init; } = Environment.GetEnvironmentVariable($"{Prefix}_{PasswordVariable}") ??
throw new InvalidOperationException($"Environment variable {Prefix}_{PasswordVariable} is not set");
private string Database { get; init; } = Environment.GetEnvironmentVariable($"{Prefix}_{DatabaseVariable}") ??
throw new InvalidOperationException($"Environment variable {Prefix}_{DatabaseVariable} is not set");
private int PORT { get; init; } = int.Parse(
Environment.GetEnvironmentVariable($"{Prefix}_{PortVariable}") ??
throw new InvalidOperationException($"Environment variable {Prefix}_{PortVariable} is not set"));
public string StorageConnectionString => $"Host={Host};Port={PORT};Username={Username};Password={Password};Database={Database};";
}
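With the defaults from `.env.example`, the class above composes the connection string consumed by the Dapper storage layer. A small standalone sketch (not part of the diff) showing the result; the variables are set in-process here only so the example runs on its own:

```csharp
// Standalone sketch: the values below mirror .env.example.
Environment.SetEnvironmentVariable("POSTGRES_HOST", "postgres");
Environment.SetEnvironmentVariable("POSTGRES_PORT", "5432");
Environment.SetEnvironmentVariable("POSTGRES_USER", "postgres");
Environment.SetEnvironmentVariable("POSTGRES_PASSWORD", "postgres");
Environment.SetEnvironmentVariable("POSTGRES_DB", "knightcrawler");

var postgres = new PostgresConfiguration();
Console.WriteLine(postgres.StorageConnectionString);
// Host=postgres;Port=5432;Username=postgres;Password=postgres;Database=knightcrawler;
```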


@@ -0,0 +1,39 @@
namespace Producer.Models.Configuration;
public class RabbitMqConfiguration
{
public const string SectionName = "RabbitMqConfiguration";
public const string Filename = "rabbitmq.json";
public string? Host { get; set; }
public string? Username { get; set; }
public string? Password { get; set; }
public string? QueueName { get; set; }
public bool Durable { get; set; }
public int MaxQueueSize { get; set; }
public int MaxPublishBatchSize { get; set; } = 500;
public int PublishIntervalInSeconds { get; set; } = 1000 * 10;
public void Validate()
{
if (MaxQueueSize == 0)
{
return;
}
if (MaxQueueSize < 0)
{
throw new InvalidOperationException("MaxQueueSize cannot be less than 0 in RabbitMqConfiguration");
}
if (MaxPublishBatchSize < 0)
{
throw new InvalidOperationException("MaxPublishBatchSize cannot be less than 0 in RabbitMqConfiguration");
}
if (MaxPublishBatchSize > MaxQueueSize)
{
throw new InvalidOperationException("MaxPublishBatchSize cannot be greater than MaxQueueSize in RabbitMqConfiguration");
}
}
}
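A short standalone sketch of what `Validate()` enforces: with `MaxQueueSize=0` (the `.env.example` default) every check is skipped, while a bounded queue smaller than the publish batch size is rejected. The object is constructed directly here rather than bound from configuration, purely for illustration:

```csharp
// Example only: exercising the guard clauses in Validate().
var rabbitConfig = new RabbitMqConfiguration
{
    Host = "rabbitmq",
    QueueName = "ingested",
    Durable = true,
    MaxQueueSize = 100,        // bounded queue...
    MaxPublishBatchSize = 500  // ...but batches larger than the queue
};

try
{
    rabbitConfig.Validate();
}
catch (InvalidOperationException ex)
{
    // "MaxPublishBatchSize cannot be greater than MaxQueueSize in RabbitMqConfiguration"
    Console.WriteLine(ex.Message);
}
```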


@@ -1,4 +1,4 @@
-namespace Producer.Models;
+namespace Producer.Models.Configuration;
 
 public class ScrapeConfiguration
 {
@@ -6,5 +6,4 @@ public class ScrapeConfiguration
     public const string Filename = "scrapers.json";
 
     public List<Scraper> Scrapers { get; set; } = [];
-    public string StorageConnectionString { get; set; } = "";
 }


@@ -1,16 +0,0 @@
namespace Producer.Models;
public class RabbitMqConfiguration
{
public const string SectionName = "RabbitMqConfiguration";
public const string Filename = "rabbitmq.json";
public string? Host { get; set; }
public string? Username { get; set; }
public string? Password { get; set; }
public string? QueueName { get; set; }
public bool Durable { get; set; }
public int MaxQueueSize { get; set; }
public int MaxPublishBatchSize { get; set; } = 500;
public int PublishIntervalInSeconds { get; set; } = 1000 * 10;
}


@@ -40,6 +40,7 @@
     <None Include="Configuration\github.json">
       <CopyToOutputDirectory>Always</CopyToOutputDirectory>
     </None>
+    <Content Remove="Configuration\postgres.json" />
   </ItemGroup>
 </Project>


@@ -1,6 +1,8 @@
+using Producer.Models.Configuration;
+
 namespace Producer.Services;
 
-public class DapperDataStorage(ScrapeConfiguration configuration, RabbitMqConfiguration rabbitConfig, ILogger<DapperDataStorage> logger) : IDataStorage
+public class DapperDataStorage(PostgresConfiguration configuration, RabbitMqConfiguration rabbitConfig, ILogger<DapperDataStorage> logger) : IDataStorage
 {
     private const string InsertTorrentSql =
         """


@@ -1,4 +1,6 @@
+using Producer.Models.Configuration;
+
 namespace Producer.Services;
 
 public class TorrentPublisher(
     ISendEndpointProvider sendEndpointProvider,