Merge and simplify the environment variables in addon and consumer.

Todo: producer

Replace DATABASE_URI with generic POSTGRES_* variables

DOES NOT WORK - First pass at upgrading environment variables

PostgreSQL environment variables have been split for addon and consumer. ENABLE_SYNC is hard-coded to `true`

Update MongoDB variables.

Make the addon code more similar to the consumer code

Get some parity between addon and consumer
purple_emily
2024-02-04 08:30:55 +00:00
parent b7c3c4376b
commit cd3c2d3fe6
7 changed files with 469 additions and 372 deletions

.env (new file)

@@ -0,0 +1,40 @@
# General environment variables
TZ=Europe/London
# PostgreSQL
POSTGRES_HOST=postgres
POSTGRES_PORT=5432
POSTGRES_USER=postgres
POSTGRES_PASSWORD=postgres
POSTGRES_DB=knightcrawler
# MongoDB
MONGODB_HOST=mongodb
MONGODB_PORT=27017
MONGODB_DB=knightcrawler
MONGO_INITDB_ROOT_USERNAME=mongo
MONGO_INITDB_ROOT_PASSWORD=mongo
# Addon
DEBUG_MODE=false
# Consumer
RABBIT_URI=amqp://guest:guest@rabbitmq:5672/?heartbeat=30
QUEUE_NAME=ingested
JOB_CONCURRENCY=5
JOBS_ENABLED=true
MAX_SINGLE_TORRENT_CONNECTIONS=10
TORRENT_TIMEOUT=30000
UDP_TRACKERS_ENABLED=true
# Producer
ScrapeConfiguration__StorageConnectionString=host=postgres;username=postgres;password=postgres;database=knightcrawler;
RabbitMqConfiguration__Host=rabbitmq
RabbitMqConfiguration__QueueName=ingested
RabbitMqConfiguration__Username=guest
RabbitMqConfiguration__Password=guest
RabbitMqConfiguration__Durable=true
RabbitMqConfiguration__MaxQueueSize=0
RabbitMqConfiguration__MaxPublishBatchSize=500
RabbitMqConfiguration__PublishIntervalInSeconds=10
GithubSettings__PAT=
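
Worth noting while reading the diffs below: the Node config changes read POSTGRES_USERNAME and POSTGRES_DATABASE, while this file defines POSTGRES_USER and POSTGRES_DB, so the services would fall back to their hard-coded defaults - consistent with the "DOES NOT WORK" note above. A minimal sketch (not part of the commit) for printing the connection strings these variables would produce, assuming they are exported in the shell:

// check-env.js - hypothetical helper, not part of this commit.
// Prints the connection strings assembled from the variables in .env,
// mirroring the URI building done in the addon and consumer config.js below.
const e = process.env;

const postgresUri = `postgres://${e.POSTGRES_USER}:${e.POSTGRES_PASSWORD}@${e.POSTGRES_HOST}:${e.POSTGRES_PORT}/${e.POSTGRES_DB}`;
const mongoUri = `mongodb://${e.MONGO_INITDB_ROOT_USERNAME}:${e.MONGO_INITDB_ROOT_PASSWORD}@${e.MONGODB_HOST}:${e.MONGODB_PORT}/${e.MONGODB_DB}?authSource=admin`;

console.log(postgresUri);  // postgres://postgres:postgres@postgres:5432/knightcrawler
console.log(mongoUri);     // mongodb://mongo:mongo@mongodb:27017/knightcrawler?authSource=admin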


@@ -35,10 +35,9 @@ x-apps: &knightcrawler-app
services:
postgres:
image: postgres:latest
env_file:
- .env
environment:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: knightcrawler
PGUSER: postgres # needed for healthcheck.
ports:
- "5432:5432"
@@ -51,9 +50,8 @@ services:
mongodb:
image: mongo:latest
environment:
MONGO_INITDB_ROOT_USERNAME: mongo
MONGO_INITDB_ROOT_PASSWORD: mongo
env_file:
- .env
ports:
- "27017:27017"
volumes:
@@ -81,7 +79,7 @@ services:
context: src/producer
dockerfile: Dockerfile
env_file:
- env/producer.env
- .env
<<: *knightcrawler-app
networks:
- knightcrawler-network
@@ -91,7 +89,7 @@ services:
context: src/node/consumer
dockerfile: Dockerfile
env_file:
- env/consumer.env
- .env
deploy:
replicas: 3
<<: *knightcrawler-app
@@ -105,7 +103,7 @@ services:
ports:
- "7000:7000"
env_file:
- env/addon.env
- .env
<<: *knightcrawler-app
networks:
- knightcrawler-network


@@ -1,5 +1,6 @@
import cacheManager from 'cache-manager';
import mangodbStore from 'cache-manager-mongodb';
import { cacheConfig } from './config.js';
import { isStaticUrl } from '../moch/static.js';
const GLOBAL_KEY_PREFIX = 'knightcrawler-addon';
@@ -14,19 +15,16 @@ const AVAILABILITY_EMPTY_TTL = 30 * 60; // 30 minutes
const MESSAGE_VIDEO_URL_TTL = 60; // 1 minute
// When the streams are empty we want to cache it for less time in case of timeouts or failures
const MONGO_URI = process.env.MONGODB_URI;
const NO_CACHE = process.env.NO_CACHE || false;
const memoryCache = initiateMemoryCache();
const remoteCache = initiateRemoteCache();
function initiateRemoteCache() {
if (NO_CACHE) {
if (cacheConfig.NO_CACHE) {
return null;
} else if (MONGO_URI) {
} else if (cacheConfig.MONGO_URI) {
return cacheManager.caching({
store: mangodbStore,
uri: MONGO_URI,
uri: cacheConfig.MONGO_URI,
options: {
collection: 'knightcrawler_addon_collection',
socketTimeoutMS: 120000,
@@ -54,7 +52,7 @@ function initiateMemoryCache() {
}
function cacheWrap(cache, key, method, options) {
if (NO_CACHE || !cache) {
if (cacheConfig.NO_CACHE || !cache) {
return method();
}
return cache.wrap(key, method, options);
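
For context, the wrapper is typically given a prefixed key, an async producer and a ttl; the names below are illustrative, not taken from this diff:

// Illustrative only: cacheWrapStreams and fetchStreamsFromDb are hypothetical names.
function cacheWrapStreams(infoHash, fetchStreamsFromDb) {
  const key = `${GLOBAL_KEY_PREFIX}|stream|${infoHash}`;
  // Falls straight through to fetchStreamsFromDb when cacheConfig.NO_CACHE is set
  // or no MongoDB-backed cache was initialised.
  return cacheWrap(remoteCache, key, () => fetchStreamsFromDb(infoHash), { ttl: 24 * 60 * 60 });
}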


@@ -0,0 +1,38 @@
export const cacheConfig = {
MONGODB_HOST: process.env.MONGODB_HOST || 'mongodb',
MONGODB_PORT: process.env.MONGODB_PORT || '27017',
MONGODB_DB: process.env.MONGODB_DB || 'selfhostio',
MONGO_INITDB_ROOT_USERNAME: process.env.MONGO_INITDB_ROOT_USERNAME || 'mongo',
MONGO_INITDB_ROOT_PASSWORD: process.env.MONGO_INITDB_ROOT_PASSWORD || 'mongo',
COLLECTION_NAME: process.env.MONGODB_COLLECTION || 'selfhostio_consumer_collection',
NO_CACHE: parseBool(process.env.NO_CACHE, false),
}
// Combine the environment variables into a connection string
// The combined string will look something like:
// 'mongodb://mongo:mongo@localhost:27017/selfhostio?authSource=admin'
cacheConfig.MONGO_URI = 'mongodb://' + cacheConfig.MONGO_INITDB_ROOT_USERNAME + ':' + cacheConfig.MONGO_INITDB_ROOT_PASSWORD + '@' + cacheConfig.MONGODB_HOST + ':' + cacheConfig.MONGODB_PORT + '/' + cacheConfig.MONGODB_DB + '?authSource=admin';
export const databaseConfig = {
POSTGRES_HOST: process.env.POSTGRES_HOST || 'postgres',
POSTGRES_PORT: process.env.POSTGRES_PORT || '5432',
POSTGRES_DATABASE: process.env.POSTGRES_DATABASE || 'selfhostio',
POSTGRES_USERNAME: process.env.POSTGRES_USERNAME || 'postgres',
POSTGRES_PASSWORD: process.env.POSTGRES_PASSWORD || 'postgres',
}
// Combine the environment variables into a connection string
// The combined string will look something like:
// 'postgres://postgres:postgres@localhost:5432/selfhostio'
databaseConfig.POSTGRES_URI = 'postgres://' + databaseConfig.POSTGRES_USERNAME + ':' + databaseConfig.POSTGRES_PASSWORD + '@' + databaseConfig.POSTGRES_HOST + ':' + databaseConfig.POSTGRES_PORT + '/' + databaseConfig.POSTGRES_DATABASE;
function parseBool(boolString, defaultValue) {
const isString = typeof boolString === 'string' || boolString instanceof String;
if (!isString) {
return defaultValue;
}
return boolString.toLowerCase() === 'true' ? true : defaultValue;
}
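
Note the asymmetry in parseBool above: only the literal string 'true' (case-insensitive) produces true; any other string, including 'false', falls back to defaultValue. A few illustrative calls:

parseBool('true', false);    // true
parseBool('TRUE', false);    // true  - lower-cased before comparison
parseBool('false', true);    // true  - falls through to the default, not false
parseBool(undefined, false); // false - non-string input returns the default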


@@ -1,10 +1,14 @@
import { Sequelize } from 'sequelize';
import { databaseConfig } from './config.js';
const { Op } = Sequelize;
const DATABASE_URI = process.env.DATABASE_URI || 'postgres://postgres:postgres@localhost:5432/postgres';
const database = new Sequelize(DATABASE_URI, { logging: false });
const database = new Sequelize(
databaseConfig.POSTGRES_URI,
{
logging: false
}
);
const Torrent = database.define('torrent',
{
@@ -73,7 +77,7 @@ export function getTorrent(infoHash) {
}
export function getFiles(infoHashes) {
return File.findAll({ where: { infoHash: { [Op.in]: infoHashes} } });
return File.findAll({ where: { infoHash: { [Op.in]: infoHashes } } });
}
export function getImdbIdMovieEntries(imdbId) {


@@ -4,16 +4,34 @@
}
export const cacheConfig = {
MONGO_URI: process.env.MONGODB_URI || 'mongodb://mongo:mongo@localhost:27017/knightcrawler?authSource=admin',
MONGODB_HOST: process.env.MONGODB_HOST || 'mongodb',
MONGODB_PORT: process.env.MONGODB_PORT || '27017',
MONGODB_DB: process.env.MONGODB_DB || 'knightcrawler',
MONGO_INITDB_ROOT_USERNAME: process.env.MONGO_INITDB_ROOT_USERNAME || 'mongo',
MONGO_INITDB_ROOT_PASSWORD: process.env.MONGO_INITDB_ROOT_PASSWORD || 'mongo',
NO_CACHE: parseBool(process.env.NO_CACHE, false),
COLLECTION_NAME: process.env.MONGODB_COLLECTION || 'knightcrawler_consumer_collection'
}
// Combine the environment variables into a connection string
// The combined string will look something like:
// 'mongodb://mongo:mongo@localhost:27017/knightcrawler?authSource=admin'
cacheConfig.MONGO_URI = 'mongodb://' + cacheConfig.MONGO_INITDB_ROOT_USERNAME + ':' + cacheConfig.MONGO_INITDB_ROOT_PASSWORD + '@' + cacheConfig.MONGODB_HOST + ':' + cacheConfig.MONGODB_PORT + '/' + cacheConfig.MONGODB_DB + '?authSource=admin';
export const databaseConfig = {
DATABASE_URI: process.env.DATABASE_URI || 'postgres://postgres:postgres@localhost:5432/knightcrawler',
ENABLE_SYNC: parseBool(process.env.ENABLE_SYNC, true)
POSTGRES_HOST: process.env.POSTGRES_HOST || 'postgres',
POSTGRES_PORT: process.env.POSTGRES_PORT || '5432',
POSTGRES_DATABASE: process.env.POSTGRES_DATABASE || 'knightcrawler',
POSTGRES_USERNAME: process.env.POSTGRES_USERNAME || 'postgres',
POSTGRES_PASSWORD: process.env.POSTGRES_PASSWORD || 'postgres',
ENABLE_SYNC: true
}
// Combine the environment variables into a connection string
// The combined string will look something like:
// 'postgres://postgres:postgres@localhost:5432/knightcrawler'
databaseConfig.POSTGRES_URI = 'postgres://' + databaseConfig.POSTGRES_USERNAME + ':' + databaseConfig.POSTGRES_PASSWORD + '@' + databaseConfig.POSTGRES_HOST + ':' + databaseConfig.POSTGRES_PORT + '/' + databaseConfig.POSTGRES_DATABASE;
export const jobConfig = {
JOB_CONCURRENCY: parseInt(process.env.JOB_CONCURRENCY || 1),
JOBS_ENABLED: parseBool(process.env.JOBS_ENABLED || true)
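
If the consumer's parseBool matches the addon's implementation shown earlier (an assumption - its body sits outside this hunk), the jobConfig defaults have an edge case: with JOBS_ENABLED unset, `process.env.JOBS_ENABLED || true` passes the boolean true, which is not a string, so parseBool returns its defaultValue, and none is supplied here:

// Illustrative, assuming the same parseBool as in the addon config above.
parseBool(process.env.JOBS_ENABLED || true);  // undefined when JOBS_ENABLED is unset
parseBool(process.env.JOBS_ENABLED, true);    // safer shape: explicit default of true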


@@ -5,7 +5,7 @@ import {logger} from "./logger.js";
import * as Promises from './promises.js';
const database = new Sequelize(
databaseConfig.DATABASE_URI,
databaseConfig.POSTGRES_URI,
{
logging: false
}
@@ -30,7 +30,8 @@ const IngestedTorrent = database.define('ingested_torrent', {
processed: {
type: DataTypes.BOOLEAN,
defaultValue: false
}},
}
},
{
indexes: [
{
@@ -44,7 +45,7 @@ const IngestedTorrent = database.define('ingested_torrent', {
const IngestedPage = database.define('ingested_page', {
id: { type: DataTypes.BIGINT, autoIncrement: true, primaryKey: true },
url: { type: DataTypes.STRING, allowNull: false },
},
},
{
indexes: [
{
@@ -186,7 +187,7 @@ export function connect() {
if (databaseConfig.ENABLE_SYNC) {
return database.sync({ alter: true })
.catch(error => {
logger.error('Failed syncing database: ', error);
console.error('Failed syncing database: ', error);
throw error;
});
}
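
With ENABLE_SYNC now effectively always true, connect() performs a schema sync on every start; in Sequelize, sync({ alter: true }) issues ALTER TABLE statements to bring existing tables in line with the model definitions. A minimal sketch of a call site (hypothetical, not part of this diff):

// Hypothetical startup snippet: the import path and call site are assumptions.
import { connect } from './lib/repository.js';

await connect();  // creates or alters the ingested_torrent and ingested_page tables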