Merge and simplify the environment variables in the addon and consumer.

Todo: producer

Change DATABASE_URI to be generic POSTGRES variables

DOES NOT WORK - First pass at upgrading environment variables

PostgreSQL environment variables have been split for the addon and consumer. ENABLE_SYNC is hard-coded as `true`.

MongoDB variables update.

Make the addon code more similar to the consumer code

Get some parity between addon and consumer
This commit is contained in:
purple_emily
2024-02-04 08:30:55 +00:00
parent b7c3c4376b
commit cd3c2d3fe6
7 changed files with 469 additions and 372 deletions

View File

@@ -1,6 +1,7 @@
import cacheManager from 'cache-manager';
import mangodbStore from 'cache-manager-mongodb';
import { cacheConfig } from './config.js';
import { isStaticUrl } from '../moch/static.js';
const GLOBAL_KEY_PREFIX = 'knightcrawler-addon';
const STREAM_KEY_PREFIX = `${GLOBAL_KEY_PREFIX}|stream`;
@@ -14,90 +15,87 @@ const AVAILABILITY_EMPTY_TTL = 30 * 60; // 30 minutes
const MESSAGE_VIDEO_URL_TTL = 60; // 1 minutes
// When the streams are empty we want to cache it for less time in case of timeouts or failures
const MONGO_URI = process.env.MONGODB_URI;
const NO_CACHE = process.env.NO_CACHE || false;
const memoryCache = initiateMemoryCache();
const remoteCache = initiateRemoteCache();
function initiateRemoteCache() {
if (NO_CACHE) {
return null;
} else if (MONGO_URI) {
return cacheManager.caching({
store: mangodbStore,
uri: MONGO_URI,
options: {
collection: 'knightcrawler_addon_collection',
socketTimeoutMS: 120000,
useNewUrlParser: true,
useUnifiedTopology: false,
ttl: STREAM_EMPTY_TTL
},
ttl: STREAM_EMPTY_TTL,
ignoreCacheErrors: true
});
} else {
return cacheManager.caching({
store: 'memory',
ttl: STREAM_EMPTY_TTL
});
}
if (cacheConfig.NO_CACHE) {
return null;
} else if (cacheConfig.MONGO_URI) {
return cacheManager.caching({
store: mangodbStore,
uri: cacheConfig.MONGO_URI,
options: {
collection: 'knightcrawler_addon_collection',
socketTimeoutMS: 120000,
useNewUrlParser: true,
useUnifiedTopology: false,
ttl: STREAM_EMPTY_TTL
},
ttl: STREAM_EMPTY_TTL,
ignoreCacheErrors: true
});
} else {
return cacheManager.caching({
store: 'memory',
ttl: STREAM_EMPTY_TTL
});
}
}
function initiateMemoryCache() {
return cacheManager.caching({
store: 'memory',
ttl: MESSAGE_VIDEO_URL_TTL,
max: Infinity // infinite LRU cache size
});
return cacheManager.caching({
store: 'memory',
ttl: MESSAGE_VIDEO_URL_TTL,
max: Infinity // infinite LRU cache size
});
}
function cacheWrap(cache, key, method, options) {
if (NO_CACHE || !cache) {
return method();
}
return cache.wrap(key, method, options);
if (cacheConfig.NO_CACHE || !cache) {
return method();
}
return cache.wrap(key, method, options);
}
export function cacheWrapStream(id, method) {
return cacheWrap(remoteCache, `${STREAM_KEY_PREFIX}:${id}`, method, {
ttl: (streams) => streams.length ? STREAM_TTL : STREAM_EMPTY_TTL
});
return cacheWrap(remoteCache, `${STREAM_KEY_PREFIX}:${id}`, method, {
ttl: (streams) => streams.length ? STREAM_TTL : STREAM_EMPTY_TTL
});
}
export function cacheWrapResolvedUrl(id, method) {
return cacheWrap(memoryCache, `${RESOLVED_URL_KEY_PREFIX}:${id}`, method, {
ttl: (url) => isStaticUrl(url) ? MESSAGE_VIDEO_URL_TTL : STREAM_TTL
});
return cacheWrap(memoryCache, `${RESOLVED_URL_KEY_PREFIX}:${id}`, method, {
ttl: (url) => isStaticUrl(url) ? MESSAGE_VIDEO_URL_TTL : STREAM_TTL
});
}
export function cacheAvailabilityResults(results) {
Object.keys(results)
.forEach(infoHash => {
const key = `${AVAILABILITY_KEY_PREFIX}:${infoHash}`;
const value = results[infoHash];
const ttl = value?.length ? AVAILABILITY_TTL : AVAILABILITY_EMPTY_TTL;
memoryCache.set(key, value, { ttl })
});
return results;
Object.keys(results)
.forEach(infoHash => {
const key = `${AVAILABILITY_KEY_PREFIX}:${infoHash}`;
const value = results[infoHash];
const ttl = value?.length ? AVAILABILITY_TTL : AVAILABILITY_EMPTY_TTL;
memoryCache.set(key, value, { ttl })
});
return results;
}
export function getCachedAvailabilityResults(infoHashes) {
const keys = infoHashes.map(infoHash => `${AVAILABILITY_KEY_PREFIX}:${infoHash}`)
return new Promise(resolve => {
memoryCache.mget(...keys, (error, result) => {
if (error) {
console.log('Failed retrieve availability cache', error)
return resolve({});
}
const availabilityResults = {};
infoHashes.forEach((infoHash, index) => {
if (result[index]) {
availabilityResults[infoHash] = result[index];
}
});
resolve(availabilityResults);
})
});
const keys = infoHashes.map(infoHash => `${AVAILABILITY_KEY_PREFIX}:${infoHash}`)
return new Promise(resolve => {
memoryCache.mget(...keys, (error, result) => {
if (error) {
console.log('Failed retrieve availability cache', error)
return resolve({});
}
const availabilityResults = {};
infoHashes.forEach((infoHash, index) => {
if (result[index]) {
availabilityResults[infoHash] = result[index];
}
});
resolve(availabilityResults);
})
});
}

View File

@@ -0,0 +1,38 @@
export const cacheConfig = {
MONGODB_HOST: process.env.MONGODB_HOST || 'mongodb',
MONGODB_PORT: process.env.MONGODB_PORT || '27017',
MONGODB_DB: process.env.MONGODB_DB || 'selfhostio',
MONGO_INITDB_ROOT_USERNAME: process.env.MONGO_INITDB_ROOT_USERNAME || 'mongo',
MONGO_INITDB_ROOT_PASSWORD: process.env.MONGO_INITDB_ROOT_PASSWORD || 'mongo',
COLLECTION_NAME: process.env.MONGODB_COLLECTION || 'selfhostio_consumer_collection',
NO_CACHE: parseBool(process.env.NO_CACHE, false),
}
// Combine the environment variables into a connection string
// The combined string will look something like:
// 'mongodb://mongo:mongo@localhost:27017/selfhostio?authSource=admin'
cacheConfig.MONGO_URI = 'mongodb://' + cacheConfig.MONGO_INITDB_ROOT_USERNAME + ':' + cacheConfig.MONGO_INITDB_ROOT_PASSWORD + '@' + cacheConfig.MONGODB_HOST + ':' + cacheConfig.MONGODB_PORT + '/' + cacheConfig.MONGODB_DB + '?authSource=admin';
export const databaseConfig = {
POSTGRES_HOST: process.env.POSTGRES_HOST || 'postgres',
POSTGRES_PORT: process.env.POSTGRES_PORT || '5432',
POSTGRES_DATABASE: process.env.POSTGRES_DATABASE || 'selfhostio',
POSTGRES_USERNAME: process.env.POSTGRES_USERNAME || 'postgres',
POSTGRES_PASSWORD: process.env.POSTGRES_PASSWORD || 'postgres',
}
// Combine the environment variables into a connection string
// The combined string will look something like:
// 'postgres://postgres:postgres@localhost:5432/selfhostio'
databaseConfig.POSTGRES_URI = 'postgres://' + databaseConfig.POSTGRES_USERNAME + ':' + databaseConfig.POSTGRES_PASSWORD + '@' + databaseConfig.POSTGRES_HOST + ':' + databaseConfig.POSTGRES_PORT + '/' + databaseConfig.POSTGRES_DATABASE;
function parseBool(boolString, defaultValue) {
const isString = typeof boolString === 'string' || boolString instanceof String;
if (!isString) {
return defaultValue;
}
return boolString.toLowerCase() === 'true' ? true : defaultValue;
}

View File

@@ -1,64 +1,68 @@
import { Sequelize } from 'sequelize';
import { databaseConfig } from './config.js';
const { Op } = Sequelize;
const DATABASE_URI = process.env.DATABASE_URI || 'postgres://postgres:postgres@localhost:5432/postgres';
const database = new Sequelize(DATABASE_URI, { logging: false });
const database = new Sequelize(
databaseConfig.POSTGRES_URI,
{
logging: false
}
);
const Torrent = database.define('torrent',
{
infoHash: { type: Sequelize.STRING(64), primaryKey: true },
provider: { type: Sequelize.STRING(32), allowNull: false },
torrentId: { type: Sequelize.STRING(128) },
title: { type: Sequelize.STRING(256), allowNull: false },
size: { type: Sequelize.BIGINT },
type: { type: Sequelize.STRING(16), allowNull: false },
uploadDate: { type: Sequelize.DATE, allowNull: false },
seeders: { type: Sequelize.SMALLINT },
trackers: { type: Sequelize.STRING(4096) },
languages: { type: Sequelize.STRING(4096) },
resolution: { type: Sequelize.STRING(16) }
infoHash: { type: Sequelize.STRING(64), primaryKey: true },
provider: { type: Sequelize.STRING(32), allowNull: false },
torrentId: { type: Sequelize.STRING(128) },
title: { type: Sequelize.STRING(256), allowNull: false },
size: { type: Sequelize.BIGINT },
type: { type: Sequelize.STRING(16), allowNull: false },
uploadDate: { type: Sequelize.DATE, allowNull: false },
seeders: { type: Sequelize.SMALLINT },
trackers: { type: Sequelize.STRING(4096) },
languages: { type: Sequelize.STRING(4096) },
resolution: { type: Sequelize.STRING(16) }
}
);
const File = database.define('file',
{
id: { type: Sequelize.BIGINT, autoIncrement: true, primaryKey: true },
infoHash: {
type: Sequelize.STRING(64),
allowNull: false,
references: { model: Torrent, key: 'infoHash' },
onDelete: 'CASCADE'
},
fileIndex: { type: Sequelize.INTEGER },
title: { type: Sequelize.STRING(256), allowNull: false },
size: { type: Sequelize.BIGINT },
imdbId: { type: Sequelize.STRING(32) },
imdbSeason: { type: Sequelize.INTEGER },
imdbEpisode: { type: Sequelize.INTEGER },
kitsuId: { type: Sequelize.INTEGER },
kitsuEpisode: { type: Sequelize.INTEGER }
id: { type: Sequelize.BIGINT, autoIncrement: true, primaryKey: true },
infoHash: {
type: Sequelize.STRING(64),
allowNull: false,
references: { model: Torrent, key: 'infoHash' },
onDelete: 'CASCADE'
},
fileIndex: { type: Sequelize.INTEGER },
title: { type: Sequelize.STRING(256), allowNull: false },
size: { type: Sequelize.BIGINT },
imdbId: { type: Sequelize.STRING(32) },
imdbSeason: { type: Sequelize.INTEGER },
imdbEpisode: { type: Sequelize.INTEGER },
kitsuId: { type: Sequelize.INTEGER },
kitsuEpisode: { type: Sequelize.INTEGER }
},
);
const Subtitle = database.define('subtitle',
{
infoHash: {
type: Sequelize.STRING(64),
allowNull: false,
references: { model: Torrent, key: 'infoHash' },
onDelete: 'CASCADE'
},
fileIndex: { type: Sequelize.INTEGER, allowNull: false },
fileId: {
type: Sequelize.BIGINT,
allowNull: true,
references: { model: File, key: 'id' },
onDelete: 'SET NULL'
},
title: { type: Sequelize.STRING(512), allowNull: false },
size: { type: Sequelize.BIGINT, allowNull: false },
infoHash: {
type: Sequelize.STRING(64),
allowNull: false,
references: { model: Torrent, key: 'infoHash' },
onDelete: 'CASCADE'
},
fileIndex: { type: Sequelize.INTEGER, allowNull: false },
fileId: {
type: Sequelize.BIGINT,
allowNull: true,
references: { model: File, key: 'id' },
onDelete: 'SET NULL'
},
title: { type: Sequelize.STRING(512), allowNull: false },
size: { type: Sequelize.BIGINT, allowNull: false },
},
{ timestamps: false }
);
@@ -69,66 +73,66 @@ File.hasMany(Subtitle, { foreignKey: 'fileId', constraints: false });
Subtitle.belongsTo(File, { foreignKey: 'fileId', constraints: false });
export function getTorrent(infoHash) {
return Torrent.findOne({ where: { infoHash: infoHash } });
return Torrent.findOne({ where: { infoHash: infoHash } });
}
export function getFiles(infoHashes) {
return File.findAll({ where: { infoHash: { [Op.in]: infoHashes} } });
return File.findAll({ where: { infoHash: { [Op.in]: infoHashes } } });
}
export function getImdbIdMovieEntries(imdbId) {
return File.findAll({
where: {
imdbId: { [Op.eq]: imdbId }
},
include: [Torrent],
limit: 500,
order: [
[Torrent, 'size', 'DESC']
]
});
return File.findAll({
where: {
imdbId: { [Op.eq]: imdbId }
},
include: [Torrent],
limit: 500,
order: [
[Torrent, 'size', 'DESC']
]
});
}
export function getImdbIdSeriesEntries(imdbId, season, episode) {
return File.findAll({
where: {
imdbId: { [Op.eq]: imdbId },
imdbSeason: { [Op.eq]: season },
imdbEpisode: { [Op.eq]: episode }
},
include: [Torrent],
limit: 500,
order: [
[Torrent, 'size', 'DESC']
]
});
return File.findAll({
where: {
imdbId: { [Op.eq]: imdbId },
imdbSeason: { [Op.eq]: season },
imdbEpisode: { [Op.eq]: episode }
},
include: [Torrent],
limit: 500,
order: [
[Torrent, 'size', 'DESC']
]
});
}
export function getKitsuIdMovieEntries(kitsuId) {
return File.findAll({
where: {
kitsuId: { [Op.eq]: kitsuId }
},
include: [Torrent],
limit: 500,
order: [
[Torrent, 'size', 'DESC']
]
});
return File.findAll({
where: {
kitsuId: { [Op.eq]: kitsuId }
},
include: [Torrent],
limit: 500,
order: [
[Torrent, 'size', 'DESC']
]
});
}
export function getKitsuIdSeriesEntries(kitsuId, episode) {
return File.findAll({
where: {
kitsuId: { [Op.eq]: kitsuId },
kitsuEpisode: { [Op.eq]: episode }
},
include: [Torrent],
limit: 500,
order: [
[Torrent, 'size', 'DESC']
]
});
return File.findAll({
where: {
kitsuId: { [Op.eq]: kitsuId },
kitsuEpisode: { [Op.eq]: episode }
},
include: [Torrent],
limit: 500,
order: [
[Torrent, 'size', 'DESC']
]
});
}