Merge pull request #46 from purple-emily/change-to-single-env-file
Simplify the environment variables
.env.example (new file, 39 lines)
@@ -0,0 +1,39 @@
+# General environment variables
+TZ=London/Europe
+
+# PostgreSQL
+POSTGRES_HOST=postgres
+POSTGRES_PORT=5432
+POSTGRES_USER=postgres
+POSTGRES_PASSWORD=postgres
+POSTGRES_DB=knightcrawler
+
+# MongoDB
+MONGODB_HOST=mongodb
+MONGODB_PORT=27017
+MONGODB_DB=knightcrawler
+MONGO_INITDB_ROOT_USERNAME=mongo
+MONGO_INITDB_ROOT_PASSWORD=mongo
+
+# Addon
+DEBUG_MODE=false
+
+# Consumer
+RABBIT_URI=amqp://guest:guest@rabbitmq:5672/?heartbeat=30
+QUEUE_NAME=ingested
+JOB_CONCURRENCY=5
+JOBS_ENABLED=true
+MAX_SINGLE_TORRENT_CONNECTIONS=10
+TORRENT_TIMEOUT=30000
+UDP_TRACKERS_ENABLED=true
+
+# Producer
+RabbitMqConfiguration__Host=rabbitmq
+RabbitMqConfiguration__QueueName=ingested
+RabbitMqConfiguration__Username=guest
+RabbitMqConfiguration__Password=guest
+RabbitMqConfiguration__Durable=true
+RabbitMqConfiguration__MaxQueueSize=0
+RabbitMqConfiguration__MaxPublishBatchSize=500
+RabbitMqConfiguration__PublishIntervalInSeconds=10
+GithubSettings__PAT=
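
Every service now sources its settings from this single file. One way to sanity-check that the variables actually reach a container, once the stack is up, is something like the following (a minimal sketch; `postgres` is the service name used in the compose changes further down):

```sh
cp .env.example .env
# Bring up just the database, then print the environment it received.
docker compose up -d postgres
docker compose exec postgres env | grep POSTGRES_
```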
.gitignore (1 line changed)
@@ -2,6 +2,7 @@
 .now
 .DS_Store
 .idea
+.env
 
 ## Ignore Visual Studio temporary files, build results, and
 ## files generated by popular Visual Studio add-ons.
README.md (13 lines changed)
@@ -13,6 +13,7 @@ A self-hosted Stremio addon for streaming torrents via a debrid service.
 - [Overview](#overview)
 - [Using](#using)
 - [Initial setup (optional)](#initial-setup-optional)
+- [Environment Setup](#environment-setup)
 - [Run the project](#run-the-project)
 - [Monitoring with Grafana and Prometheus (Optional)](#monitoring-with-grafana-and-prometheus-optional)
 - [Accessing RabbitMQ Management](#accessing-rabbitmq-management)
@@ -60,6 +61,17 @@ We can search DebridMediaManager hash lists which are hosted on GitHub. This all
 GithubSettings__PAT=<YOUR TOKEN HERE>
 ```
 
+
+### Environment Setup
+
+Before running the project, you need to set up the environment variables. Copy the `.env.example` file to `.env`:
+
+```sh
+cp .env.example .env
+```
+
+Then set any of the values you'd like to customize.
+
 ### Run the project
 
 Open a terminal in the directory and run the command:
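
The new Environment Setup flow means any value can be overridden by editing `.env` in place. For instance, to change the database password non-interactively (an illustrative value, GNU sed syntax):

```sh
# Swap in a stronger Postgres password before first run (example value only).
sed -i 's/^POSTGRES_PASSWORD=.*/POSTGRES_PASSWORD=s3cr3t/' .env
```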
@@ -108,6 +120,7 @@ Now, you can use these dashboards to monitor RabbitMQ and Postgres metrics.
 
 Note: If you encounter issues with missing or unavailable data in Grafana, please ensure on [Prometheus's target page](http://127.0.0.1:9090/targets) that the RabbitMQ target is up and running.
 
 
 ## Importing external dumps
 
 A brief record of the steps required to import external data, in this case the rarbg dump which can be found on RD:
@@ -35,10 +35,9 @@ x-apps: &knightcrawler-app
 services:
   postgres:
     image: postgres:latest
+    env_file:
+      - .env
     environment:
-      POSTGRES_USER: postgres
-      POSTGRES_PASSWORD: postgres
-      POSTGRES_DB: knightcrawler
       PGUSER: postgres # needed for healthcheck.
     ports:
       - "5432:5432"
@@ -51,9 +50,8 @@ services:
 
   mongodb:
     image: mongo:latest
-    environment:
-      MONGO_INITDB_ROOT_USERNAME: mongo
-      MONGO_INITDB_ROOT_PASSWORD: mongo
+    env_file:
+      - .env
     ports:
       - "27017:27017"
     volumes:
@@ -81,7 +79,7 @@ services:
       context: src/producer
       dockerfile: Dockerfile
     env_file:
-      - env/producer.env
+      - .env
     <<: *knightcrawler-app
     networks:
       - knightcrawler-network
@@ -91,7 +89,7 @@ services:
       context: src/node/consumer
       dockerfile: Dockerfile
     env_file:
-      - env/consumer.env
+      - .env
     deploy:
       replicas: 3
     <<: *knightcrawler-app
@@ -105,7 +103,7 @@ services:
     ports:
       - "7000:7000"
     env_file:
-      - env/addon.env
+      - .env
     <<: *knightcrawler-app
     networks:
       - knightcrawler-network
env/addon.env (deleted, 4 lines)
@@ -1,4 +0,0 @@
-TZ=London/Europe
-DATABASE_URI=postgres://postgres:postgres@postgres/knightcrawler
-MONGODB_URI=mongodb://mongo:mongo@mongodb/knightcrawler?tls=false&authSource=admin
-DEBUG_MODE=false
env/consumer.env (deleted, 11 lines)
@@ -1,11 +0,0 @@
-TZ=London/Europe
-MONGODB_URI=mongodb://mongo:mongo@mongodb/knightcrawler?tls=false&authSource=admin
-DATABASE_URI=postgres://postgres:postgres@postgres/knightcrawler
-RABBIT_URI=amqp://guest:guest@rabbitmq:5672/?heartbeat=30
-QUEUE_NAME=ingested
-JOB_CONCURRENCY=5
-JOBS_ENABLED=true
-ENABLE_SYNC=true
-MAX_SINGLE_TORRENT_CONNECTIONS=10
-TORRENT_TIMEOUT=30000
-UDP_TRACKERS_ENABLED=true
env/producer.env (deleted, 10 lines)
@@ -1,10 +0,0 @@
-ScrapeConfiguration__StorageConnectionString=host=postgres;username=postgres;password=postgres;database=knightcrawler;
-RabbitMqConfiguration__Host=rabbitmq
-RabbitMqConfiguration__QueueName=ingested
-RabbitMqConfiguration__Username=guest
-RabbitMqConfiguration__Password=guest
-RabbitMqConfiguration__Durable=true
-RabbitMqConfiguration__MaxQueueSize=0
-RabbitMqConfiguration__MaxPublishBatchSize=500
-RabbitMqConfiguration__PublishIntervalInSeconds=10
-GithubSettings__PAT=
@@ -1,5 +1,6 @@
 import cacheManager from 'cache-manager';
 import mangodbStore from 'cache-manager-mongodb';
+import { cacheConfig } from './config.js';
 import { isStaticUrl } from '../moch/static.js';
 
 const GLOBAL_KEY_PREFIX = 'knightcrawler-addon';
@@ -14,19 +15,16 @@ const AVAILABILITY_EMPTY_TTL = 30 * 60; // 30 minutes
 const MESSAGE_VIDEO_URL_TTL = 60; // 1 minutes
 // When the streams are empty we want to cache it for less time in case of timeouts or failures
 
-const MONGO_URI = process.env.MONGODB_URI;
-const NO_CACHE = process.env.NO_CACHE || false;
-
 const memoryCache = initiateMemoryCache();
 const remoteCache = initiateRemoteCache();
 
 function initiateRemoteCache() {
-  if (NO_CACHE) {
+  if (cacheConfig.NO_CACHE) {
     return null;
-  } else if (MONGO_URI) {
+  } else if (cacheConfig.MONGO_URI) {
     return cacheManager.caching({
       store: mangodbStore,
-      uri: MONGO_URI,
+      uri: cacheConfig.MONGO_URI,
       options: {
         collection: 'knightcrawler_addon_collection',
         socketTimeoutMS: 120000,
@@ -54,7 +52,7 @@ function initiateMemoryCache() {
 }
 
 function cacheWrap(cache, key, method, options) {
-  if (NO_CACHE || !cache) {
+  if (cacheConfig.NO_CACHE || !cache) {
     return method();
   }
   return cache.wrap(key, method, options);
src/node/addon/src/lib/config.js (new file, 38 lines)
@@ -0,0 +1,38 @@
+export const cacheConfig = {
+    MONGODB_HOST: process.env.MONGODB_HOST || 'mongodb',
+    MONGODB_PORT: process.env.MONGODB_PORT || '27017',
+    MONGODB_DB: process.env.MONGODB_DB || 'selfhostio',
+    MONGO_INITDB_ROOT_USERNAME: process.env.MONGO_INITDB_ROOT_USERNAME || 'mongo',
+    MONGO_INITDB_ROOT_PASSWORD: process.env.MONGO_INITDB_ROOT_PASSWORD || 'mongo',
+    COLLECTION_NAME: process.env.MONGODB_COLLECTION || 'selfhostio_consumer_collection',
+    NO_CACHE: parseBool(process.env.NO_CACHE, false),
+}
+
+// Combine the environment variables into a connection string
+// The combined string will look something like:
+// 'mongodb://mongo:mongo@localhost:27017/selfhostio?authSource=admin'
+cacheConfig.MONGO_URI = 'mongodb://' + cacheConfig.MONGO_INITDB_ROOT_USERNAME + ':' + cacheConfig.MONGO_INITDB_ROOT_PASSWORD + '@' + cacheConfig.MONGODB_HOST + ':' + cacheConfig.MONGODB_PORT + '/' + cacheConfig.MONGODB_DB + '?authSource=admin';
+
+export const databaseConfig = {
+    POSTGRES_HOST: process.env.POSTGRES_HOST || 'postgres',
+    POSTGRES_PORT: process.env.POSTGRES_PORT || '5432',
+    POSTGRES_DATABASE: process.env.POSTGRES_DATABASE || 'selfhostio',
+    POSTGRES_USERNAME: process.env.POSTGRES_USERNAME || 'postgres',
+    POSTGRES_PASSWORD: process.env.POSTGRES_PASSWORD || 'postgres',
+}
+
+// Combine the environment variables into a connection string
+// The combined string will look something like:
+// 'postgres://postgres:postgres@localhost:5432/selfhostio'
+databaseConfig.POSTGRES_URI = 'postgres://' + databaseConfig.POSTGRES_USERNAME + ':' + databaseConfig.POSTGRES_PASSWORD + '@' + databaseConfig.POSTGRES_HOST + ':' + databaseConfig.POSTGRES_PORT + '/' + databaseConfig.POSTGRES_DATABASE;
+
+
+function parseBool(boolString, defaultValue) {
+    const isString = typeof boolString === 'string' || boolString instanceof String;
+
+    if (!isString) {
+        return defaultValue;
+    }
+
+    return boolString.toLowerCase() === 'true' ? true : defaultValue;
+}
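
Because this module assembles the connection strings at import time, they can be inspected without booting the addon (a quick sketch; run from the repository root with any overrides already exported, path per the file header above):

```sh
node -e "import('./src/node/addon/src/lib/config.js')
  .then(m => console.log(m.cacheConfig.MONGO_URI, m.databaseConfig.POSTGRES_URI))"
```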
@@ -1,10 +1,14 @@
 import { Sequelize } from 'sequelize';
+import { databaseConfig } from './config.js';
 
 const { Op } = Sequelize;
 
-const DATABASE_URI = process.env.DATABASE_URI || 'postgres://postgres:postgres@localhost:5432/postgres';
-
-const database = new Sequelize(DATABASE_URI, { logging: false });
+const database = new Sequelize(
+    databaseConfig.POSTGRES_URI,
+    {
+        logging: false
+    }
+);
 
 const Torrent = database.define('torrent',
     {
@@ -4,16 +4,34 @@
 }
 
 export const cacheConfig = {
-    MONGO_URI: process.env.MONGODB_URI || 'mongodb://mongo:mongo@localhost:27017/knightcrawler?authSource=admin',
+    MONGODB_HOST: process.env.MONGODB_HOST || 'mongodb',
+    MONGODB_PORT: process.env.MONGODB_PORT || '27017',
+    MONGODB_DB: process.env.MONGODB_DB || 'knightcrawler',
+    MONGO_INITDB_ROOT_USERNAME: process.env.MONGO_INITDB_ROOT_USERNAME || 'mongo',
+    MONGO_INITDB_ROOT_PASSWORD: process.env.MONGO_INITDB_ROOT_PASSWORD || 'mongo',
     NO_CACHE: parseBool(process.env.NO_CACHE, false),
     COLLECTION_NAME: process.env.MONGODB_COLLECTION || 'knightcrawler_consumer_collection'
 }
 
+// Combine the environment variables into a connection string
+// The combined string will look something like:
+// 'mongodb://mongo:mongo@localhost:27017/knightcrawler?authSource=admin'
+cacheConfig.MONGO_URI = 'mongodb://' + cacheConfig.MONGO_INITDB_ROOT_USERNAME + ':' + cacheConfig.MONGO_INITDB_ROOT_PASSWORD + '@' + cacheConfig.MONGODB_HOST + ':' + cacheConfig.MONGODB_PORT + '/' + cacheConfig.MONGODB_DB + '?authSource=admin';
+
 export const databaseConfig = {
-    DATABASE_URI: process.env.DATABASE_URI || 'postgres://postgres:postgres@localhost:5432/knightcrawler',
-    ENABLE_SYNC: parseBool(process.env.ENABLE_SYNC, true)
+    POSTGRES_HOST: process.env.POSTGRES_HOST || 'postgres',
+    POSTGRES_PORT: process.env.POSTGRES_PORT || '5432',
+    POSTGRES_DATABASE: process.env.POSTGRES_DATABASE || 'knightcrawler',
+    POSTGRES_USERNAME: process.env.POSTGRES_USERNAME || 'postgres',
+    POSTGRES_PASSWORD: process.env.POSTGRES_PASSWORD || 'postgres',
+    ENABLE_SYNC: true
 }
 
+// Combine the environment variables into a connection string
+// The combined string will look something like:
+// 'postgres://postgres:postgres@localhost:5432/knightcrawler'
+databaseConfig.POSTGRES_URI = 'postgres://' + databaseConfig.POSTGRES_USERNAME + ':' + databaseConfig.POSTGRES_PASSWORD + '@' + databaseConfig.POSTGRES_HOST + ':' + databaseConfig.POSTGRES_PORT + '/' + databaseConfig.POSTGRES_DATABASE;
+
 export const jobConfig = {
     JOB_CONCURRENCY: parseInt(process.env.JOB_CONCURRENCY || 1),
     JOBS_ENABLED: parseBool(process.env.JOBS_ENABLED || true)
@@ -5,7 +5,7 @@ import {logger} from "./logger.js";
 import * as Promises from './promises.js';
 
 const database = new Sequelize(
-    databaseConfig.DATABASE_URI,
+    databaseConfig.POSTGRES_URI,
     {
         logging: false
     }
@@ -30,7 +30,8 @@ const IngestedTorrent = database.define('ingested_torrent', {
     processed: {
         type: DataTypes.BOOLEAN,
         defaultValue: false
-    }},
+    }
+    },
     {
         indexes: [
             {
@@ -186,7 +187,7 @@ export function connect() {
     if (databaseConfig.ENABLE_SYNC) {
         return database.sync({ alter: true })
             .catch(error => {
-                logger.error('Failed syncing database: ', error);
+                console.error('Failed syncing database: ', error);
                 throw error;
             });
     }
@@ -1,6 +1,5 @@
 {
   "ScrapeConfiguration": {
-    "StorageConnectionString": "",
     "Scrapers": [
       {
         "Name": "SyncEzTvJob",
@@ -1,3 +1,5 @@
+using Producer.Models.Configuration;
+
 namespace Producer.Crawlers.Sites;
 
 public partial class DebridMediaManagerCrawler(
@@ -1,3 +1,5 @@
+using Producer.Models.Configuration;
+
 namespace Producer.Extensions;
 
 public static class ConfigurationExtensions
@@ -1,3 +1,5 @@
+using Producer.Models.Configuration;
+
 namespace Producer.Extensions;
 
 public static class ServiceCollectionExtensions
@@ -20,6 +22,7 @@ public static class ServiceCollectionExtensions
 
     internal static IServiceCollection AddDataStorage(this IServiceCollection services)
     {
+        services.LoadConfigurationFromEnv<PostgresConfiguration>();
         services.AddTransient<IDataStorage, DapperDataStorage>();
         services.AddTransient<IMessagePublisher, TorrentPublisher>();
         return services;
@@ -36,9 +39,9 @@ public static class ServiceCollectionExtensions
         services.AddMassTransit(busConfigurator =>
         {
             busConfigurator.SetKebabCaseEndpointNameFormatter();
-            busConfigurator.UsingRabbitMq((context, busFactoryConfigurator) =>
+            busConfigurator.UsingRabbitMq((_, busFactoryConfigurator) =>
             {
-                busFactoryConfigurator.Host(rabbitConfig!.Host, hostConfigurator =>
+                busFactoryConfigurator.Host(rabbitConfig.Host, hostConfigurator =>
                 {
                     hostConfigurator.Username(rabbitConfig.Username);
                     hostConfigurator.Password(rabbitConfig.Password);
@@ -51,9 +54,9 @@ public static class ServiceCollectionExtensions
 
     internal static IServiceCollection AddQuartz(this IServiceCollection services, IConfiguration configuration)
     {
-        var scrapeConfiguration = LoadScrapeConfiguration(services, configuration);
-        var githubConfiguration = LoadGithubConfiguration(services, configuration);
-        var rabbitConfig = LoadRabbitMQConfiguration(services, configuration);
+        var scrapeConfiguration = services.LoadConfigurationFromConfig<ScrapeConfiguration>(configuration, ScrapeConfiguration.SectionName);
+        var githubConfiguration = services.LoadConfigurationFromConfig<GithubConfiguration>(configuration, GithubConfiguration.SectionName);
+        var rabbitConfig = services.LoadConfigurationFromConfig<RabbitMqConfiguration>(configuration, RabbitMqConfiguration.SectionName);
 
         services
             .AddTransient<SyncEzTvJob>()
@@ -93,45 +96,28 @@ public static class ServiceCollectionExtensions
         return services;
     }
 
-    private static GithubConfiguration LoadGithubConfiguration(IServiceCollection services, IConfiguration configuration)
+    private static TConfiguration LoadConfigurationFromConfig<TConfiguration>(this IServiceCollection services, IConfiguration configuration, string sectionName)
+        where TConfiguration : class
     {
-        var githubConfiguration = configuration.GetSection(GithubConfiguration.SectionName).Get<GithubConfiguration>();
+        var instance = configuration.GetSection(sectionName).Get<TConfiguration>();
 
-        ArgumentNullException.ThrowIfNull(githubConfiguration, nameof(githubConfiguration));
+        ArgumentNullException.ThrowIfNull(instance, nameof(instance));
 
-        services.TryAddSingleton(githubConfiguration);
+        services.TryAddSingleton(instance);
 
-        return githubConfiguration;
+        return instance;
     }
 
-    private static RabbitMqConfiguration LoadRabbitMQConfiguration(IServiceCollection services, IConfiguration configuration)
+    private static TConfiguration LoadConfigurationFromEnv<TConfiguration>(this IServiceCollection services)
+        where TConfiguration : class
     {
-        var rabbitConfiguration = configuration.GetSection(RabbitMqConfiguration.SectionName).Get<RabbitMqConfiguration>();
+        var instance = Activator.CreateInstance<TConfiguration>();
 
-        ArgumentNullException.ThrowIfNull(rabbitConfiguration, nameof(rabbitConfiguration));
+        ArgumentNullException.ThrowIfNull(instance, nameof(instance));
 
-        if (rabbitConfiguration.MaxQueueSize > 0)
-        {
-            if (rabbitConfiguration.MaxPublishBatchSize > rabbitConfiguration.MaxQueueSize)
-            {
-                throw new InvalidOperationException("MaxPublishBatchSize cannot be greater than MaxQueueSize in RabbitMqConfiguration");
-            }
-        }
+        services.TryAddSingleton(instance);
 
-        services.TryAddSingleton(rabbitConfiguration);
-
-        return rabbitConfiguration;
-    }
-
-    private static ScrapeConfiguration LoadScrapeConfiguration(IServiceCollection services, IConfiguration configuration)
-    {
-        var scrapeConfiguration = configuration.GetSection(ScrapeConfiguration.SectionName).Get<ScrapeConfiguration>();
-
-        ArgumentNullException.ThrowIfNull(scrapeConfiguration, nameof(scrapeConfiguration));
-
-        services.TryAddSingleton(scrapeConfiguration);
-
-        return scrapeConfiguration;
+        return instance;
     }
 
     private static void AddJobWithTrigger<TJobType>(
@@ -2,6 +2,7 @@
 
 global using System.Text;
 global using System.Text.Json;
 global using System.Text.Json.Serialization;
 global using System.Text.RegularExpressions;
 global using System.Xml.Linq;
 global using Dapper;
@@ -1,4 +1,4 @@
-namespace Producer.Models;
+namespace Producer.Models.Configuration;
 
 public class GithubConfiguration
 {
src/producer/Models/Configuration/PostgresConfiguration.cs (new file, 29 lines)
@@ -0,0 +1,29 @@
+namespace Producer.Models.Configuration;
+
+public class PostgresConfiguration
+{
+    private const string Prefix = "POSTGRES";
+    private const string HostVariable = "HOST";
+    private const string UsernameVariable = "USER";
+    private const string PasswordVariable = "PASSWORD";
+    private const string DatabaseVariable = "DB";
+    private const string PortVariable = "PORT";
+
+    private string Host { get; init; } = Environment.GetEnvironmentVariable($"{Prefix}_{HostVariable}") ??
+                                         throw new InvalidOperationException($"Environment variable {Prefix}_{HostVariable} is not set");
+
+    private string Username { get; init; } = Environment.GetEnvironmentVariable($"{Prefix}_{UsernameVariable}") ??
+                                             throw new InvalidOperationException($"Environment variable {Prefix}_{UsernameVariable} is not set");
+
+    private string Password { get; init; } = Environment.GetEnvironmentVariable($"{Prefix}_{PasswordVariable}") ??
+                                             throw new InvalidOperationException($"Environment variable {Prefix}_{PasswordVariable} is not set");
+
+    private string Database { get; init; } = Environment.GetEnvironmentVariable($"{Prefix}_{DatabaseVariable}") ??
+                                             throw new InvalidOperationException($"Environment variable {Prefix}_{DatabaseVariable} is not set");
+
+    private int PORT { get; init; } = int.Parse(
+        Environment.GetEnvironmentVariable($"{Prefix}_{PortVariable}") ??
+        throw new InvalidOperationException($"Environment variable {Prefix}_{PortVariable} is not set"));
+
+    public string StorageConnectionString => $"Host={Host};Port={PORT};Username={Username};Password={Password};Database={Database};";
+}
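
Note that, unlike the addon's JS config, `PostgresConfiguration` ships no fallback defaults: every property initializer throws `InvalidOperationException` if its variable is absent, so the producer fails fast on an incomplete environment. The five variables it requires (per the constants above) are:

```sh
# All five must be set or the producer throws at startup.
export POSTGRES_HOST=postgres
export POSTGRES_USER=postgres
export POSTGRES_PASSWORD=postgres
export POSTGRES_DB=knightcrawler
export POSTGRES_PORT=5432
```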
src/producer/Models/Configuration/RabbitMqConfiguration.cs (new file, 39 lines)
@@ -0,0 +1,39 @@
+namespace Producer.Models.Configuration;
+
+public class RabbitMqConfiguration
+{
+    public const string SectionName = "RabbitMqConfiguration";
+    public const string Filename = "rabbitmq.json";
+
+    public string? Host { get; set; }
+    public string? Username { get; set; }
+    public string? Password { get; set; }
+    public string? QueueName { get; set; }
+    public bool Durable { get; set; }
+    public int MaxQueueSize { get; set; }
+    public int MaxPublishBatchSize { get; set; } = 500;
+    public int PublishIntervalInSeconds { get; set; } = 1000 * 10;
+
+    public void Validate()
+    {
+        if (MaxQueueSize == 0)
+        {
+            return;
+        }
+
+        if (MaxQueueSize < 0)
+        {
+            throw new InvalidOperationException("MaxQueueSize cannot be less than 0 in RabbitMqConfiguration");
+        }
+
+        if (MaxPublishBatchSize < 0)
+        {
+            throw new InvalidOperationException("MaxPublishBatchSize cannot be less than 0 in RabbitMqConfiguration");
+        }
+
+        if (MaxPublishBatchSize > MaxQueueSize)
+        {
+            throw new InvalidOperationException("MaxPublishBatchSize cannot be greater than MaxQueueSize in RabbitMqConfiguration");
+        }
+    }
+}
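
This relocated class is what the `RabbitMqConfiguration__*` entries in `.env.example` bind to: the .NET configuration binder treats a double underscore in an environment variable name as a section separator, which is why the producer no longer needs its own env file. For example (values taken from `.env.example`):

```sh
# "__" maps each variable onto a property of the RabbitMqConfiguration section.
export RabbitMqConfiguration__Host=rabbitmq
export RabbitMqConfiguration__MaxQueueSize=0
export RabbitMqConfiguration__MaxPublishBatchSize=500
```

With the default `MaxQueueSize=0`, `Validate()` returns early, so the batch-size checks only apply once a positive queue bound is configured.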
@@ -1,4 +1,4 @@
-namespace Producer.Models;
+namespace Producer.Models.Configuration;
 
 public class ScrapeConfiguration
 {
@@ -6,5 +6,4 @@ public class ScrapeConfiguration
     public const string Filename = "scrapers.json";
 
     public List<Scraper> Scrapers { get; set; } = [];
-    public string StorageConnectionString { get; set; } = "";
 }
@@ -1,16 +0,0 @@
-namespace Producer.Models;
-
-public class RabbitMqConfiguration
-{
-    public const string SectionName = "RabbitMqConfiguration";
-    public const string Filename = "rabbitmq.json";
-
-    public string? Host { get; set; }
-    public string? Username { get; set; }
-    public string? Password { get; set; }
-    public string? QueueName { get; set; }
-    public bool Durable { get; set; }
-    public int MaxQueueSize { get; set; }
-    public int MaxPublishBatchSize { get; set; } = 500;
-    public int PublishIntervalInSeconds { get; set; } = 1000 * 10;
-}
@@ -40,6 +40,7 @@
     <None Include="Configuration\github.json">
       <CopyToOutputDirectory>Always</CopyToOutputDirectory>
     </None>
+    <Content Remove="Configuration\postgres.json" />
   </ItemGroup>
 
 </Project>
@@ -1,6 +1,8 @@
+using Producer.Models.Configuration;
+
 namespace Producer.Services;
 
-public class DapperDataStorage(ScrapeConfiguration configuration, RabbitMqConfiguration rabbitConfig, ILogger<DapperDataStorage> logger) : IDataStorage
+public class DapperDataStorage(PostgresConfiguration configuration, RabbitMqConfiguration rabbitConfig, ILogger<DapperDataStorage> logger) : IDataStorage
 {
     private const string InsertTorrentSql =
         """
@@ -1,4 +1,6 @@
-namespace Producer.Services;
+using Producer.Models.Configuration;
+
+namespace Producer.Services;
 
 public class TorrentPublisher(
     ISendEndpointProvider sendEndpointProvider,