Merge and simplify the environment variables in addon and consumer.

Todo: producer.

- Change DATABASE_URI to be generic POSTGRES variables
- DOES NOT WORK - first pass at upgrading environment variables
- PostgreSQL environment variables have been split for addon and consumer
- ENABLE_SYNC hard-coded as `true`
- MongoDB variables updated
- Make the addon code more similar to the consumer code
- Get some parity between addon and consumer
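As a reading aid only (not part of the commit), the sketch below shows the configuration pattern both services move to: individual POSTGRES_* variables (and the analogous MONGODB_*/MONGO_INITDB_* ones) are read with defaults and combined into a single connection URI. The variable names and defaults are taken from the consumer config.js in the diff below; the template-literal form is just an equivalent restatement of the string concatenation used there.

// Illustrative sketch, not part of the commit: how the new generic
// PostgreSQL variables combine into one connection string.
// Names and defaults mirror the consumer config.js in the diff below.
const databaseConfig = {
  POSTGRES_HOST: process.env.POSTGRES_HOST || 'postgres',
  POSTGRES_PORT: process.env.POSTGRES_PORT || '5432',
  POSTGRES_DATABASE: process.env.POSTGRES_DATABASE || 'knightcrawler',
  POSTGRES_USERNAME: process.env.POSTGRES_USERNAME || 'postgres',
  POSTGRES_PASSWORD: process.env.POSTGRES_PASSWORD || 'postgres',
};

// With the defaults above this evaluates to:
// 'postgres://postgres:postgres@postgres:5432/knightcrawler'
databaseConfig.POSTGRES_URI =
  `postgres://${databaseConfig.POSTGRES_USERNAME}:${databaseConfig.POSTGRES_PASSWORD}` +
  `@${databaseConfig.POSTGRES_HOST}:${databaseConfig.POSTGRES_PORT}/${databaseConfig.POSTGRES_DATABASE}`;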
@@ -1,6 +1,7 @@
import cacheManager from 'cache-manager';
import mangodbStore from 'cache-manager-mongodb';
-import { isStaticUrl } from '../moch/static.js';
+import { cacheConfig } from './config.js';
+import { isStaticUrl } from '../moch/static.js';

const GLOBAL_KEY_PREFIX = 'knightcrawler-addon';
const STREAM_KEY_PREFIX = `${GLOBAL_KEY_PREFIX}|stream`;
@@ -14,90 +15,87 @@ const AVAILABILITY_EMPTY_TTL = 30 * 60; // 30 minutes
const MESSAGE_VIDEO_URL_TTL = 60; // 1 minutes
// When the streams are empty we want to cache it for less time in case of timeouts or failures

-const MONGO_URI = process.env.MONGODB_URI;
-const NO_CACHE = process.env.NO_CACHE || false;
-
const memoryCache = initiateMemoryCache();
const remoteCache = initiateRemoteCache();

function initiateRemoteCache() {
-  if (NO_CACHE) {
+  if (cacheConfig.NO_CACHE) {
    return null;
-  } else if (MONGO_URI) {
+  } else if (cacheConfig.MONGO_URI) {
    return cacheManager.caching({
      store: mangodbStore,
-      uri: MONGO_URI,
+      uri: cacheConfig.MONGO_URI,
      options: {
        collection: 'knightcrawler_addon_collection',
        socketTimeoutMS: 120000,
        useNewUrlParser: true,
        useUnifiedTopology: false,
        ttl: STREAM_EMPTY_TTL
      },
      ttl: STREAM_EMPTY_TTL,
      ignoreCacheErrors: true
    });
  } else {
    return cacheManager.caching({
      store: 'memory',
      ttl: STREAM_EMPTY_TTL
    });
  }
}

function initiateMemoryCache() {
  return cacheManager.caching({
    store: 'memory',
    ttl: MESSAGE_VIDEO_URL_TTL,
    max: Infinity // infinite LRU cache size
  });
}

function cacheWrap(cache, key, method, options) {
-  if (NO_CACHE || !cache) {
+  if (cacheConfig.NO_CACHE || !cache) {
    return method();
  }
  return cache.wrap(key, method, options);
}

export function cacheWrapStream(id, method) {
  return cacheWrap(remoteCache, `${STREAM_KEY_PREFIX}:${id}`, method, {
    ttl: (streams) => streams.length ? STREAM_TTL : STREAM_EMPTY_TTL
  });
}

export function cacheWrapResolvedUrl(id, method) {
  return cacheWrap(memoryCache, `${RESOLVED_URL_KEY_PREFIX}:${id}`, method, {
    ttl: (url) => isStaticUrl(url) ? MESSAGE_VIDEO_URL_TTL : STREAM_TTL
  });
}

export function cacheAvailabilityResults(results) {
  Object.keys(results)
    .forEach(infoHash => {
      const key = `${AVAILABILITY_KEY_PREFIX}:${infoHash}`;
      const value = results[infoHash];
      const ttl = value?.length ? AVAILABILITY_TTL : AVAILABILITY_EMPTY_TTL;
      memoryCache.set(key, value, { ttl })
    });
  return results;
}

export function getCachedAvailabilityResults(infoHashes) {
  const keys = infoHashes.map(infoHash => `${AVAILABILITY_KEY_PREFIX}:${infoHash}`)
  return new Promise(resolve => {
    memoryCache.mget(...keys, (error, result) => {
      if (error) {
        console.log('Failed retrieve availability cache', error)
        return resolve({});
      }
      const availabilityResults = {};
      infoHashes.forEach((infoHash, index) => {
        if (result[index]) {
          availabilityResults[infoHash] = result[index];
        }
      });
      resolve(availabilityResults);
    })
  });
}
src/node/addon/src/lib/config.js (new file, 38 lines)
@@ -0,0 +1,38 @@
+export const cacheConfig = {
+  MONGODB_HOST: process.env.MONGODB_HOST || 'mongodb',
+  MONGODB_PORT: process.env.MONGODB_PORT || '27017',
+  MONGODB_DB: process.env.MONGODB_DB || 'selfhostio',
+  MONGO_INITDB_ROOT_USERNAME: process.env.MONGO_INITDB_ROOT_USERNAME || 'mongo',
+  MONGO_INITDB_ROOT_PASSWORD: process.env.MONGO_INITDB_ROOT_PASSWORD || 'mongo',
+  COLLECTION_NAME: process.env.MONGODB_COLLECTION || 'selfhostio_consumer_collection',
+  NO_CACHE: parseBool(process.env.NO_CACHE, false),
+}
+
+// Combine the environment variables into a connection string
+// The combined string will look something like:
+// 'mongodb://mongo:mongo@localhost:27017/selfhostio?authSource=admin'
+cacheConfig.MONGO_URI = 'mongodb://' + cacheConfig.MONGO_INITDB_ROOT_USERNAME + ':' + cacheConfig.MONGO_INITDB_ROOT_PASSWORD + '@' + cacheConfig.MONGODB_HOST + ':' + cacheConfig.MONGODB_PORT + '/' + cacheConfig.MONGODB_DB + '?authSource=admin';
+
+export const databaseConfig = {
+  POSTGRES_HOST: process.env.POSTGRES_HOST || 'postgres',
+  POSTGRES_PORT: process.env.POSTGRES_PORT || '5432',
+  POSTGRES_DATABASE: process.env.POSTGRES_DATABASE || 'selfhostio',
+  POSTGRES_USERNAME: process.env.POSTGRES_USERNAME || 'postgres',
+  POSTGRES_PASSWORD: process.env.POSTGRES_PASSWORD || 'postgres',
+}
+
+// Combine the environment variables into a connection string
+// The combined string will look something like:
+// 'postgres://postgres:postgres@localhost:5432/selfhostio'
+databaseConfig.POSTGRES_URI = 'postgres://' + databaseConfig.POSTGRES_USERNAME + ':' + databaseConfig.POSTGRES_PASSWORD + '@' + databaseConfig.POSTGRES_HOST + ':' + databaseConfig.POSTGRES_PORT + '/' + databaseConfig.POSTGRES_DATABASE;
+
+
+function parseBool(boolString, defaultValue) {
+  const isString = typeof boolString === 'string' || boolString instanceof String;
+
+  if (!isString) {
+    return defaultValue;
+  }
+
+  return boolString.toLowerCase() === 'true' ? true : defaultValue;
+}
@@ -1,64 +1,68 @@
import { Sequelize } from 'sequelize';
+import { databaseConfig } from './config.js';

const { Op } = Sequelize;

-const DATABASE_URI = process.env.DATABASE_URI || 'postgres://postgres:postgres@localhost:5432/postgres';
-
-const database = new Sequelize(DATABASE_URI, { logging: false });
+const database = new Sequelize(
+  databaseConfig.POSTGRES_URI,
+  {
+    logging: false
+  }
+);

const Torrent = database.define('torrent',
  {
    infoHash: { type: Sequelize.STRING(64), primaryKey: true },
    provider: { type: Sequelize.STRING(32), allowNull: false },
    torrentId: { type: Sequelize.STRING(128) },
    title: { type: Sequelize.STRING(256), allowNull: false },
    size: { type: Sequelize.BIGINT },
    type: { type: Sequelize.STRING(16), allowNull: false },
    uploadDate: { type: Sequelize.DATE, allowNull: false },
    seeders: { type: Sequelize.SMALLINT },
    trackers: { type: Sequelize.STRING(4096) },
    languages: { type: Sequelize.STRING(4096) },
    resolution: { type: Sequelize.STRING(16) }
  }
);

const File = database.define('file',
  {
    id: { type: Sequelize.BIGINT, autoIncrement: true, primaryKey: true },
    infoHash: {
      type: Sequelize.STRING(64),
      allowNull: false,
      references: { model: Torrent, key: 'infoHash' },
      onDelete: 'CASCADE'
    },
    fileIndex: { type: Sequelize.INTEGER },
    title: { type: Sequelize.STRING(256), allowNull: false },
    size: { type: Sequelize.BIGINT },
    imdbId: { type: Sequelize.STRING(32) },
    imdbSeason: { type: Sequelize.INTEGER },
    imdbEpisode: { type: Sequelize.INTEGER },
    kitsuId: { type: Sequelize.INTEGER },
    kitsuEpisode: { type: Sequelize.INTEGER }
  },
);

const Subtitle = database.define('subtitle',
  {
    infoHash: {
      type: Sequelize.STRING(64),
      allowNull: false,
      references: { model: Torrent, key: 'infoHash' },
      onDelete: 'CASCADE'
    },
    fileIndex: { type: Sequelize.INTEGER, allowNull: false },
    fileId: {
      type: Sequelize.BIGINT,
      allowNull: true,
      references: { model: File, key: 'id' },
      onDelete: 'SET NULL'
    },
    title: { type: Sequelize.STRING(512), allowNull: false },
    size: { type: Sequelize.BIGINT, allowNull: false },
  },
  { timestamps: false }
);
@@ -69,66 +73,66 @@ File.hasMany(Subtitle, { foreignKey: 'fileId', constraints: false });
Subtitle.belongsTo(File, { foreignKey: 'fileId', constraints: false });

export function getTorrent(infoHash) {
  return Torrent.findOne({ where: { infoHash: infoHash } });
}

export function getFiles(infoHashes) {
-  return File.findAll({ where: { infoHash: { [Op.in]: infoHashes} } });
+  return File.findAll({ where: { infoHash: { [Op.in]: infoHashes } } });
}

export function getImdbIdMovieEntries(imdbId) {
  return File.findAll({
    where: {
      imdbId: { [Op.eq]: imdbId }
    },
    include: [Torrent],
    limit: 500,
    order: [
      [Torrent, 'size', 'DESC']
    ]
  });
}

export function getImdbIdSeriesEntries(imdbId, season, episode) {
  return File.findAll({
    where: {
      imdbId: { [Op.eq]: imdbId },
      imdbSeason: { [Op.eq]: season },
      imdbEpisode: { [Op.eq]: episode }
    },
    include: [Torrent],
    limit: 500,
    order: [
      [Torrent, 'size', 'DESC']
    ]
  });
}

export function getKitsuIdMovieEntries(kitsuId) {
  return File.findAll({
    where: {
      kitsuId: { [Op.eq]: kitsuId }
    },
    include: [Torrent],
    limit: 500,
    order: [
      [Torrent, 'size', 'DESC']
    ]
  });
}

export function getKitsuIdSeriesEntries(kitsuId, episode) {
  return File.findAll({
    where: {
      kitsuId: { [Op.eq]: kitsuId },
      kitsuEpisode: { [Op.eq]: episode }
    },
    include: [Torrent],
    limit: 500,
    order: [
      [Torrent, 'size', 'DESC']
    ]
  });
}
@@ -4,16 +4,34 @@
}

export const cacheConfig = {
-  MONGO_URI: process.env.MONGODB_URI || 'mongodb://mongo:mongo@localhost:27017/knightcrawler?authSource=admin',
+  MONGODB_HOST: process.env.MONGODB_HOST || 'mongodb',
+  MONGODB_PORT: process.env.MONGODB_PORT || '27017',
+  MONGODB_DB: process.env.MONGODB_DB || 'knightcrawler',
+  MONGO_INITDB_ROOT_USERNAME: process.env.MONGO_INITDB_ROOT_USERNAME || 'mongo',
+  MONGO_INITDB_ROOT_PASSWORD: process.env.MONGO_INITDB_ROOT_PASSWORD || 'mongo',
  NO_CACHE: parseBool(process.env.NO_CACHE, false),
  COLLECTION_NAME: process.env.MONGODB_COLLECTION || 'knightcrawler_consumer_collection'
}

+// Combine the environment variables into a connection string
+// The combined string will look something like:
+// 'mongodb://mongo:mongo@localhost:27017/knightcrawler?authSource=admin'
+cacheConfig.MONGO_URI = 'mongodb://' + cacheConfig.MONGO_INITDB_ROOT_USERNAME + ':' + cacheConfig.MONGO_INITDB_ROOT_PASSWORD + '@' + cacheConfig.MONGODB_HOST + ':' + cacheConfig.MONGODB_PORT + '/' + cacheConfig.MONGODB_DB + '?authSource=admin';
+
export const databaseConfig = {
-  DATABASE_URI: process.env.DATABASE_URI || 'postgres://postgres:postgres@localhost:5432/knightcrawler',
-  ENABLE_SYNC: parseBool(process.env.ENABLE_SYNC, true)
+  POSTGRES_HOST: process.env.POSTGRES_HOST || 'postgres',
+  POSTGRES_PORT: process.env.POSTGRES_PORT || '5432',
+  POSTGRES_DATABASE: process.env.POSTGRES_DATABASE || 'knightcrawler',
+  POSTGRES_USERNAME: process.env.POSTGRES_USERNAME || 'postgres',
+  POSTGRES_PASSWORD: process.env.POSTGRES_PASSWORD || 'postgres',
+  ENABLE_SYNC: true
}

+// Combine the environment variables into a connection string
+// The combined string will look something like:
+// 'postgres://postgres:postgres@localhost:5432/knightcrawler'
+databaseConfig.POSTGRES_URI = 'postgres://' + databaseConfig.POSTGRES_USERNAME + ':' + databaseConfig.POSTGRES_PASSWORD + '@' + databaseConfig.POSTGRES_HOST + ':' + databaseConfig.POSTGRES_PORT + '/' + databaseConfig.POSTGRES_DATABASE;
+
export const jobConfig = {
  JOB_CONCURRENCY: parseInt(process.env.JOB_CONCURRENCY || 1),
  JOBS_ENABLED: parseBool(process.env.JOBS_ENABLED || true)
@@ -36,10 +54,10 @@ export const torrentConfig = {

function parseBool(boolString, defaultValue) {
  const isString = typeof boolString === 'string' || boolString instanceof String;

  if (!isString) {
    return defaultValue;
  }

  return boolString.toLowerCase() === 'true' ? true : defaultValue;
}
@@ -5,16 +5,16 @@ import {logger} from "./logger.js";
import * as Promises from './promises.js';

const database = new Sequelize(
-  databaseConfig.DATABASE_URI,
+  databaseConfig.POSTGRES_URI,
  {
    logging: false
  }
);

const Provider = database.define('provider', {
  name: { type: DataTypes.STRING(32), primaryKey: true },
  lastScraped: { type: DataTypes.DATE },
  lastScrapedId: { type: DataTypes.STRING(128) }
});

const IngestedTorrent = database.define('ingested_torrent', {
@@ -30,7 +30,8 @@ const IngestedTorrent = database.define('ingested_torrent', {
  processed: {
    type: DataTypes.BOOLEAN,
    defaultValue: false
-  }},
+  }
+},
{
  indexes: [
    {
@@ -42,9 +43,9 @@ const IngestedTorrent = database.define('ingested_torrent', {

/* eslint-disable no-unused-vars */
const IngestedPage = database.define('ingested_page', {
  id: { type: DataTypes.BIGINT, autoIncrement: true, primaryKey: true },
  url: { type: DataTypes.STRING, allowNull: false },
},
{
  indexes: [
    {
@@ -57,122 +58,122 @@ const IngestedPage = database.define('ingested_page', {

const Torrent = database.define('torrent',
  {
    infoHash: { type: DataTypes.STRING(64), primaryKey: true },
    provider: { type: DataTypes.STRING(32), allowNull: false },
    torrentId: { type: DataTypes.STRING(512) },
    title: { type: DataTypes.STRING(512), allowNull: false },
    size: { type: DataTypes.BIGINT },
    type: { type: DataTypes.STRING(16), allowNull: false },
    uploadDate: { type: DataTypes.DATE, allowNull: false },
    seeders: { type: DataTypes.SMALLINT },
    trackers: { type: DataTypes.STRING(8000) },
    languages: { type: DataTypes.STRING(4096) },
    resolution: { type: DataTypes.STRING(16) },
    reviewed: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false },
    opened: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false }
  }
);

const File = database.define('file',
  {
    id: { type: DataTypes.BIGINT, autoIncrement: true, primaryKey: true },
    infoHash: {
      type: DataTypes.STRING(64),
      allowNull: false,
      references: { model: Torrent, key: 'infoHash' },
      onDelete: 'CASCADE'
    },
    fileIndex: { type: DataTypes.INTEGER },
    title: { type: DataTypes.STRING(512), allowNull: false },
    size: { type: DataTypes.BIGINT },
    imdbId: { type: DataTypes.STRING(32) },
    imdbSeason: { type: DataTypes.INTEGER },
    imdbEpisode: { type: DataTypes.INTEGER },
    kitsuId: { type: DataTypes.INTEGER },
    kitsuEpisode: { type: DataTypes.INTEGER }
  },
  {
    indexes: [
      {
        unique: true,
        name: 'files_unique_file_constraint',
        fields: [
          col('infoHash'),
          fn('COALESCE', (col('fileIndex')), -1),
          fn('COALESCE', (col('imdbId')), 'null'),
          fn('COALESCE', (col('imdbSeason')), -1),
          fn('COALESCE', (col('imdbEpisode')), -1),
          fn('COALESCE', (col('kitsuId')), -1),
          fn('COALESCE', (col('kitsuEpisode')), -1)
        ]
      },
      { unique: false, fields: ['imdbId', 'imdbSeason', 'imdbEpisode'] },
      { unique: false, fields: ['kitsuId', 'kitsuEpisode'] }
    ]
  }
);

const Subtitle = database.define('subtitle',
  {
    infoHash: {
      type: DataTypes.STRING(64),
      allowNull: false,
      references: { model: Torrent, key: 'infoHash' },
      onDelete: 'CASCADE'
    },
    fileIndex: {
      type: DataTypes.INTEGER,
      allowNull: false
    },
    fileId: {
      type: DataTypes.BIGINT,
      allowNull: true,
      references: { model: File, key: 'id' },
      onDelete: 'SET NULL'
    },
    title: { type: DataTypes.STRING(512), allowNull: false },
  },
  {
    timestamps: false,
    indexes: [
      {
        unique: true,
        name: 'subtitles_unique_subtitle_constraint',
        fields: [
          col('infoHash'),
          col('fileIndex'),
          fn('COALESCE', (col('fileId')), -1)
        ]
      },
      { unique: false, fields: ['fileId'] }
    ]
  }
);

const Content = database.define('content',
  {
    infoHash: {
      type: DataTypes.STRING(64),
      primaryKey: true,
      allowNull: false,
      references: { model: Torrent, key: 'infoHash' },
      onDelete: 'CASCADE'
    },
    fileIndex: {
      type: DataTypes.INTEGER,
      primaryKey: true,
      allowNull: false
    },
    path: { type: DataTypes.STRING(512), allowNull: false },
    size: { type: DataTypes.BIGINT },
  },
  {
    timestamps: false,
  }
);

const SkipTorrent = database.define('skip_torrent', {
  infoHash: { type: DataTypes.STRING(64), primaryKey: true },
});

Torrent.hasMany(File, { foreignKey: 'infoHash', constraints: false });
@@ -183,39 +184,39 @@ File.hasMany(Subtitle, { foreignKey: 'fileId', constraints: false });
Subtitle.belongsTo(File, { foreignKey: 'fileId', constraints: false });

export function connect() {
  if (databaseConfig.ENABLE_SYNC) {
    return database.sync({ alter: true })
      .catch(error => {
-        logger.error('Failed syncing database: ', error);
+        console.error('Failed syncing database: ', error);
        throw error;
      });
  }
  return Promise.resolve();
}

export function getProvider(provider) {
  return Provider.findOrCreate({ where: { name: { [Op.eq]: provider.name } }, defaults: provider })
    .then((result) => result[0])
    .catch(() => provider);
}

export function getTorrent(torrent) {
  const where = torrent.infoHash
    ? { infoHash: torrent.infoHash }
    : { provider: torrent.provider, torrentId: torrent.torrentId }
  return Torrent.findOne({ where: where });
}

export function getTorrentsBasedOnTitle(titleQuery, type) {
  return getTorrentsBasedOnQuery({ title: { [Op.regexp]: `${titleQuery}` }, type: type });
}

export function getTorrentsBasedOnQuery(where) {
  return Torrent.findAll({ where: where });
}

export function getFilesBasedOnQuery(where) {
  return File.findAll({ where: where });
}

export function getUnprocessedIngestedTorrents() {
@@ -226,7 +227,7 @@ export function getUnprocessedIngestedTorrents() {
        [Op.or]: ['tv', 'movies']
      }
    },

  });
}
@@ -239,142 +240,142 @@ export function setIngestedTorrentsProcessed(ingestedTorrents) {
}

export function getTorrentsWithoutSize() {
  return Torrent.findAll({
    where: literal(
      'exists (select 1 from files where files."infoHash" = torrent."infoHash" and files.size = 300000000)'),
    order: [
      ['seeders', 'DESC']
    ]
  });
}

export function getUpdateSeedersTorrents(limit = 50) {
  const until = moment().subtract(7, 'days').format('YYYY-MM-DD');
  return Torrent.findAll({
    where: literal(`torrent."updatedAt" < '${until}'`),
    limit: limit,
    order: [
      ['seeders', 'DESC'],
      ['updatedAt', 'ASC']
    ]
  });
}

export function getUpdateSeedersNewTorrents(limit = 50) {
  const lastUpdate = moment().subtract(12, 'hours').format('YYYY-MM-DD');
  const createdAfter = moment().subtract(4, 'days').format('YYYY-MM-DD');
  return Torrent.findAll({
    where: literal(`torrent."updatedAt" < '${lastUpdate}' AND torrent."createdAt" > '${createdAfter}'`),
    limit: limit,
    order: [
      ['seeders', 'ASC'],
      ['updatedAt', 'ASC']
    ]
  });
}

export function getNoContentsTorrents() {
  return Torrent.findAll({
    where: { opened: false, seeders: { [Op.gte]: 1 } },
    limit: 500,
    order: [[fn('RANDOM')]]
  });
}

export function createTorrent(torrent) {
  return Torrent.upsert(torrent)
    .then(() => createContents(torrent.infoHash, torrent.contents))
    .then(() => createSubtitles(torrent.infoHash, torrent.subtitles));
}

export function setTorrentSeeders(torrent, seeders) {
  const where = torrent.infoHash
    ? { infoHash: torrent.infoHash }
    : { provider: torrent.provider, torrentId: torrent.torrentId }
  return Torrent.update(
    { seeders: seeders },
    { where: where }
  );
}

export function deleteTorrent(torrent) {
  return Torrent.destroy({ where: { infoHash: torrent.infoHash } })
}

export function createFile(file) {
  if (file.id) {
    return (file.dataValues ? file.save() : File.upsert(file))
      .then(() => upsertSubtitles(file, file.subtitles));
  }
  if (file.subtitles && file.subtitles.length) {
    file.subtitles = file.subtitles.map(subtitle => ({ infoHash: file.infoHash, title: subtitle.path, ...subtitle }));
  }
  return File.create(file, { include: [Subtitle], ignoreDuplicates: true });
}

export function getFiles(torrent) {
  return File.findAll({ where: { infoHash: torrent.infoHash } });
}

export function getFilesBasedOnTitle(titleQuery) {
  return File.findAll({ where: { title: { [Op.regexp]: `${titleQuery}` } } });
}

export function deleteFile(file) {
  return File.destroy({ where: { id: file.id } })
}

export function createSubtitles(infoHash, subtitles) {
  if (subtitles && subtitles.length) {
    return Subtitle.bulkCreate(subtitles.map(subtitle => ({ infoHash, title: subtitle.path, ...subtitle })));
  }
  return Promise.resolve();
}

export function upsertSubtitles(file, subtitles) {
  if (file.id && subtitles && subtitles.length) {
    return Promises.sequence(subtitles
      .map(subtitle => {
        subtitle.fileId = file.id;
        subtitle.infoHash = subtitle.infoHash || file.infoHash;
        subtitle.title = subtitle.title || subtitle.path;
        return subtitle;
      })
      .map(subtitle => () => subtitle.dataValues ? subtitle.save() : Subtitle.create(subtitle)));
  }
  return Promise.resolve();
}

export function getSubtitles(torrent) {
  return Subtitle.findAll({ where: { infoHash: torrent.infoHash } });
}

export function getUnassignedSubtitles() {
  return Subtitle.findAll({ where: { fileId: null } });
}

export function createContents(infoHash, contents) {
  if (contents && contents.length) {
    return Content.bulkCreate(contents.map(content => ({ infoHash, ...content })), { ignoreDuplicates: true })
      .then(() => Torrent.update({ opened: true }, { where: { infoHash: infoHash }, silent: true }));
  }
  return Promise.resolve();
}

export function getContents(torrent) {
  return Content.findAll({ where: { infoHash: torrent.infoHash } });
}

export function getSkipTorrent(torrent) {
  return SkipTorrent.findByPk(torrent.infoHash)
    .then((result) => {
      if (!result) {
        throw new Error(`torrent not found: ${torrent.infoHash}`);
      }
      return result.dataValues;
    })
}

export function createSkipTorrent(torrent) {
  return SkipTorrent.upsert({ infoHash: torrent.infoHash });
}