From 68edaba30855a6bb7d2e0a948ec4da658a7cde45 Mon Sep 17 00:00:00 2001 From: iPromKnight Date: Fri, 2 Feb 2024 13:49:42 +0000 Subject: [PATCH] Introduce max batch size, and configurable publish window Still need to implement queue size limit Also fixes env var consistency between addon and consumer --- env/consumer.env | 2 +- env/producer.env | 6 ++++++ src/node/consumer/lib/config.js | 2 +- src/producer/Configuration/rabbitmq.json | 5 ++++- src/producer/Crawlers/BaseCrawler.cs | 2 +- src/producer/Crawlers/BaseJsonCrawler.cs | 2 +- src/producer/Crawlers/BaseXmlCrawler.cs | 2 +- src/producer/Crawlers/CrawlerProvider.cs | 2 +- .../Sites/DebridMediaManagerCrawler.cs | 3 ++- src/producer/Crawlers/Sites/EzTvCrawler.cs | 2 +- src/producer/Crawlers/Sites/TgxCrawler.cs | 2 +- src/producer/Crawlers/Sites/TpbCrawler.cs | 2 +- src/producer/Crawlers/Sites/YtsCrawler.cs | 2 +- .../Extensions/ConfigurationExtensions.cs | 2 +- .../ConfigureHostBuilderExtensions.cs | 2 +- .../Extensions/ServiceCollectionExtensions.cs | 20 +++++++++++++++---- src/producer/GlobalUsings.cs | 14 ++++++------- src/producer/Interfaces/ICrawler.cs | 2 +- src/producer/Interfaces/ICrawlerProvider.cs | 2 +- src/producer/Interfaces/IDataStorage.cs | 2 +- src/producer/Interfaces/IIpService.cs | 2 +- src/producer/Interfaces/IMessagePublisher.cs | 2 +- src/producer/Jobs/BaseJob.cs | 2 +- src/producer/Jobs/ICrawlerJob.cs | 2 +- src/producer/Jobs/IPJob.cs | 2 +- src/producer/Jobs/PublisherJob.cs | 2 +- src/producer/Jobs/SyncDmmJob.cs | 4 +++- src/producer/Jobs/SyncEzTvJob.cs | 4 +++- src/producer/Jobs/SyncTgxJob.cs | 4 +++- src/producer/Jobs/SyncTpbJob.cs | 4 +++- src/producer/Jobs/SyncYtsJob.cs | 4 +++- src/producer/Models/GithubConfiguration.cs | 2 +- src/producer/Models/RabbitMqConfiguration.cs | 5 ++++- src/producer/Models/Results.cs | 2 +- src/producer/Models/ScrapeConfiguration.cs | 2 +- src/producer/Models/Scraper.cs | 2 +- src/producer/Models/Torrent.cs | 2 +- src/producer/Program.cs | 4 +++- src/producer/Services/DapperDataStorage.cs | 6 +++--- src/producer/Services/IpService.cs | 2 +- src/producer/Services/TorrentPublisher.cs | 2 +- 41 files changed, 88 insertions(+), 51 deletions(-) diff --git a/env/consumer.env b/env/consumer.env index 800d656..3ea17a8 100644 --- a/env/consumer.env +++ b/env/consumer.env @@ -1,6 +1,6 @@ TZ=London/Europe MONGODB_URI=mongodb://mongo:mongo@mongodb/selfhostio?tls=false&authSource=admin -POSTGRES_DATABASE_URI=postgres://postgres:postgres@postgres/selfhostio +DATABASE_URI=postgres://postgres:postgres@postgres/selfhostio RABBIT_URI=amqp://guest:guest@rabbitmq:5672/?heartbeat=30 QUEUE_NAME=ingested JOB_CONCURRENCY=5 diff --git a/env/producer.env b/env/producer.env index 9a33630..c0ac48b 100644 --- a/env/producer.env +++ b/env/producer.env @@ -1,4 +1,10 @@ ScrapeConfiguration__StorageConnectionString=host=postgres;username=postgres;password=postgres;database=selfhostio; RabbitMqConfiguration__Host=rabbitmq RabbitMqConfiguration__QueueName=ingested +RabbitMqConfiguration__Username=guest +RabbitMqConfiguration__Password=guest +RabbitMqConfiguration__Durable=true +RabbitMqConfiguration__MaxQueueSize=1000 +RabbitMqConfiguration__MaxPublishBatchSize=100 +RabbitMqConfiguration__PublishIntervalInSeconds=10 GithubSettings__PAT= \ No newline at end of file diff --git a/src/node/consumer/lib/config.js b/src/node/consumer/lib/config.js index ecd7d54..9ad6491 100644 --- a/src/node/consumer/lib/config.js +++ b/src/node/consumer/lib/config.js @@ -10,7 +10,7 @@ export const cacheConfig = { } export const 
databaseConfig = { - DATABASE_URI: process.env.POSTGRES_DATABASE_URI || 'postgres://postgres:postgres@localhost:5432/selfhostio', + DATABASE_URI: process.env.DATABASE_URI || 'postgres://postgres:postgres@localhost:5432/selfhostio', ENABLE_SYNC: parseBool(process.env.ENABLE_SYNC, true) } diff --git a/src/producer/Configuration/rabbitmq.json b/src/producer/Configuration/rabbitmq.json index 632af04..11b0d91 100644 --- a/src/producer/Configuration/rabbitmq.json +++ b/src/producer/Configuration/rabbitmq.json @@ -4,6 +4,9 @@ "Username": "guest", "Password": "guest", "QueueName": "test-queue", - "Durable": true + "Durable": true, + "MaxQueueSize": 1000, + "MaxPublishBatchSize": 100, + "PublishIntervalInSeconds": 10 } } \ No newline at end of file diff --git a/src/producer/Crawlers/BaseCrawler.cs b/src/producer/Crawlers/BaseCrawler.cs index bf33c2e..2184b16 100644 --- a/src/producer/Crawlers/BaseCrawler.cs +++ b/src/producer/Crawlers/BaseCrawler.cs @@ -1,4 +1,4 @@ -namespace Scraper.Crawlers; +namespace Producer.Crawlers; public abstract class BaseCrawler(ILogger logger, IDataStorage storage) : ICrawler { diff --git a/src/producer/Crawlers/BaseJsonCrawler.cs b/src/producer/Crawlers/BaseJsonCrawler.cs index 327b8ec..1af5267 100644 --- a/src/producer/Crawlers/BaseJsonCrawler.cs +++ b/src/producer/Crawlers/BaseJsonCrawler.cs @@ -1,4 +1,4 @@ -namespace Scraper.Crawlers; +namespace Producer.Crawlers; public abstract class BaseJsonCrawler(IHttpClientFactory httpClientFactory, ILogger logger, IDataStorage storage) : BaseCrawler(logger, storage) { diff --git a/src/producer/Crawlers/BaseXmlCrawler.cs b/src/producer/Crawlers/BaseXmlCrawler.cs index 2d83d21..2fd1c8c 100644 --- a/src/producer/Crawlers/BaseXmlCrawler.cs +++ b/src/producer/Crawlers/BaseXmlCrawler.cs @@ -1,4 +1,4 @@ -namespace Scraper.Crawlers; +namespace Producer.Crawlers; public abstract class BaseXmlCrawler(IHttpClientFactory httpClientFactory, ILogger logger, IDataStorage storage) : BaseCrawler(logger, storage) { diff --git a/src/producer/Crawlers/CrawlerProvider.cs b/src/producer/Crawlers/CrawlerProvider.cs index b84ce14..af54a62 100644 --- a/src/producer/Crawlers/CrawlerProvider.cs +++ b/src/producer/Crawlers/CrawlerProvider.cs @@ -1,4 +1,4 @@ -namespace Scraper.Crawlers; +namespace Producer.Crawlers; public class CrawlerProvider(IServiceProvider serviceProvider) : ICrawlerProvider { diff --git a/src/producer/Crawlers/Sites/DebridMediaManagerCrawler.cs b/src/producer/Crawlers/Sites/DebridMediaManagerCrawler.cs index a3d5c83..dabb738 100644 --- a/src/producer/Crawlers/Sites/DebridMediaManagerCrawler.cs +++ b/src/producer/Crawlers/Sites/DebridMediaManagerCrawler.cs @@ -1,4 +1,4 @@ -namespace Scraper.Crawlers.Sites; +namespace Producer.Crawlers.Sites; public partial class DebridMediaManagerCrawler( IHttpClientFactory httpClientFactory, @@ -83,6 +83,7 @@ public partial class DebridMediaManagerCrawler( if (!result.Success) { logger.LogWarning("Failed to mark page as ingested: [{Error}]", result.ErrorMessage); + return; } logger.LogInformation("Successfully marked page as ingested"); diff --git a/src/producer/Crawlers/Sites/EzTvCrawler.cs b/src/producer/Crawlers/Sites/EzTvCrawler.cs index 25de36f..8de23ba 100644 --- a/src/producer/Crawlers/Sites/EzTvCrawler.cs +++ b/src/producer/Crawlers/Sites/EzTvCrawler.cs @@ -1,4 +1,4 @@ -namespace Scraper.Crawlers.Sites; +namespace Producer.Crawlers.Sites; public class EzTvCrawler(IHttpClientFactory httpClientFactory, ILogger logger, IDataStorage storage) : BaseXmlCrawler(httpClientFactory, logger, storage) 
{ diff --git a/src/producer/Crawlers/Sites/TgxCrawler.cs b/src/producer/Crawlers/Sites/TgxCrawler.cs index 418cdd2..036a403 100644 --- a/src/producer/Crawlers/Sites/TgxCrawler.cs +++ b/src/producer/Crawlers/Sites/TgxCrawler.cs @@ -1,4 +1,4 @@ -namespace Scraper.Crawlers.Sites; +namespace Producer.Crawlers.Sites; public partial class TgxCrawler(IHttpClientFactory httpClientFactory, ILogger logger, IDataStorage storage) : BaseXmlCrawler(httpClientFactory, logger, storage) { diff --git a/src/producer/Crawlers/Sites/TpbCrawler.cs b/src/producer/Crawlers/Sites/TpbCrawler.cs index badd000..c0cba00 100644 --- a/src/producer/Crawlers/Sites/TpbCrawler.cs +++ b/src/producer/Crawlers/Sites/TpbCrawler.cs @@ -1,4 +1,4 @@ -namespace Scraper.Crawlers.Sites; +namespace Producer.Crawlers.Sites; public class TpbCrawler(IHttpClientFactory httpClientFactory, ILogger logger, IDataStorage storage) : BaseJsonCrawler(httpClientFactory, logger, storage) { diff --git a/src/producer/Crawlers/Sites/YtsCrawler.cs b/src/producer/Crawlers/Sites/YtsCrawler.cs index 9fbbbe0..d47ff39 100644 --- a/src/producer/Crawlers/Sites/YtsCrawler.cs +++ b/src/producer/Crawlers/Sites/YtsCrawler.cs @@ -1,4 +1,4 @@ -namespace Scraper.Crawlers.Sites; +namespace Producer.Crawlers.Sites; public class YtsCrawler(IHttpClientFactory httpClientFactory, ILogger logger, IDataStorage storage) : BaseXmlCrawler(httpClientFactory, logger, storage) { diff --git a/src/producer/Extensions/ConfigurationExtensions.cs b/src/producer/Extensions/ConfigurationExtensions.cs index 201c998..b321cab 100644 --- a/src/producer/Extensions/ConfigurationExtensions.cs +++ b/src/producer/Extensions/ConfigurationExtensions.cs @@ -1,4 +1,4 @@ -namespace Scraper.Extensions; +namespace Producer.Extensions; public static class ConfigurationExtensions { diff --git a/src/producer/Extensions/ConfigureHostBuilderExtensions.cs b/src/producer/Extensions/ConfigureHostBuilderExtensions.cs index 7a3601e..4455a90 100644 --- a/src/producer/Extensions/ConfigureHostBuilderExtensions.cs +++ b/src/producer/Extensions/ConfigureHostBuilderExtensions.cs @@ -1,4 +1,4 @@ -namespace Scraper.Extensions; +namespace Producer.Extensions; internal static class ConfigureHostBuilderExtensions { diff --git a/src/producer/Extensions/ServiceCollectionExtensions.cs b/src/producer/Extensions/ServiceCollectionExtensions.cs index e8406cd..1fbeddb 100644 --- a/src/producer/Extensions/ServiceCollectionExtensions.cs +++ b/src/producer/Extensions/ServiceCollectionExtensions.cs @@ -1,4 +1,4 @@ -namespace Scraper.Extensions; +namespace Producer.Extensions; public static class ServiceCollectionExtensions { @@ -53,6 +53,7 @@ public static class ServiceCollectionExtensions { var scrapeConfiguration = LoadScrapeConfiguration(services, configuration); var githubConfiguration = LoadGithubConfiguration(services, configuration); + var rabbitConfig = LoadRabbitMQConfiguration(services, configuration); services .AddTransient() @@ -75,7 +76,7 @@ public static class ServiceCollectionExtensions AddJobWithTrigger(quartz, SyncYtsJob.Key, SyncYtsJob.Trigger, scrapeConfiguration); AddJobWithTrigger(quartz, SyncTgxJob.Key, SyncTgxJob.Trigger, scrapeConfiguration); AddJobWithTrigger(quartz, IPJob.Key, IPJob.Trigger, 60 * 5); - AddJobWithTrigger(quartz, PublisherJob.Key, PublisherJob.Trigger, 10); + AddJobWithTrigger(quartz, PublisherJob.Key, PublisherJob.Trigger, rabbitConfig.PublishIntervalInSeconds); if (!string.IsNullOrEmpty(githubConfiguration.PAT)) { @@ -98,10 +99,21 @@ public static class ServiceCollectionExtensions 
ArgumentNullException.ThrowIfNull(githubConfiguration, nameof(githubConfiguration)); - services.AddSingleton(githubConfiguration); + services.TryAddSingleton(githubConfiguration); return githubConfiguration; } + + private static RabbitMqConfiguration LoadRabbitMQConfiguration(IServiceCollection services, IConfiguration configuration) + { + var rabbitConfiguration = configuration.GetSection(RabbitMqConfiguration.SectionName).Get<RabbitMqConfiguration>(); + + ArgumentNullException.ThrowIfNull(rabbitConfiguration, nameof(rabbitConfiguration)); + + services.TryAddSingleton(rabbitConfiguration); + + return rabbitConfiguration; + } private static ScrapeConfiguration LoadScrapeConfiguration(IServiceCollection services, IConfiguration configuration) { @@ -109,7 +121,7 @@ public static class ServiceCollectionExtensions ArgumentNullException.ThrowIfNull(scrapeConfiguration, nameof(scrapeConfiguration)); - services.AddSingleton(scrapeConfiguration); + services.TryAddSingleton(scrapeConfiguration); return scrapeConfiguration; } diff --git a/src/producer/GlobalUsings.cs b/src/producer/GlobalUsings.cs index 2c9d39e..1a7749d 100644 --- a/src/producer/GlobalUsings.cs +++ b/src/producer/GlobalUsings.cs @@ -9,14 +9,14 @@ global using LZStringCSharp; global using MassTransit; global using Microsoft.AspNetCore.Builder; global using Microsoft.Extensions.DependencyInjection; +global using Microsoft.Extensions.DependencyInjection.Extensions; global using Microsoft.Extensions.Logging; global using Npgsql; global using Quartz; -global using Scraper.Crawlers; -global using Scraper.Crawlers.Sites; -global using Scraper.Extensions; -global using Scraper.Interfaces; -global using Scraper.Jobs; -global using Scraper.Models; -global using Scraper.Services; +global using Producer.Crawlers; +global using Producer.Crawlers.Sites; +global using Producer.Interfaces; +global using Producer.Jobs; +global using Producer.Models; +global using Producer.Services; global using Serilog; \ No newline at end of file diff --git a/src/producer/Interfaces/ICrawler.cs b/src/producer/Interfaces/ICrawler.cs index 8fdac6f..225db34 100644 --- a/src/producer/Interfaces/ICrawler.cs +++ b/src/producer/Interfaces/ICrawler.cs @@ -1,4 +1,4 @@ -namespace Scraper.Interfaces; +namespace Producer.Interfaces; public interface ICrawler { diff --git a/src/producer/Interfaces/ICrawlerProvider.cs b/src/producer/Interfaces/ICrawlerProvider.cs index 81c2832..da111f2 100644 --- a/src/producer/Interfaces/ICrawlerProvider.cs +++ b/src/producer/Interfaces/ICrawlerProvider.cs @@ -1,4 +1,4 @@ -namespace Scraper.Interfaces; +namespace Producer.Interfaces; public interface ICrawlerProvider { diff --git a/src/producer/Interfaces/IDataStorage.cs b/src/producer/Interfaces/IDataStorage.cs index cb28191..7286454 100644 --- a/src/producer/Interfaces/IDataStorage.cs +++ b/src/producer/Interfaces/IDataStorage.cs @@ -1,4 +1,4 @@ -namespace Scraper.Interfaces; +namespace Producer.Interfaces; public interface IDataStorage { diff --git a/src/producer/Interfaces/IIpService.cs b/src/producer/Interfaces/IIpService.cs index 1c1f711..69f36d7 100644 --- a/src/producer/Interfaces/IIpService.cs +++ b/src/producer/Interfaces/IIpService.cs @@ -1,4 +1,4 @@ -namespace Scraper.Interfaces; +namespace Producer.Interfaces; public interface IIpService { diff --git a/src/producer/Interfaces/IMessagePublisher.cs b/src/producer/Interfaces/IMessagePublisher.cs index 0eb2bdb..c6444ab 100644 --- a/src/producer/Interfaces/IMessagePublisher.cs +++ b/src/producer/Interfaces/IMessagePublisher.cs @@ -1,4 +1,4 @@ -namespace
Scraper.Interfaces; +namespace Producer.Interfaces; public interface IMessagePublisher { diff --git a/src/producer/Jobs/BaseJob.cs b/src/producer/Jobs/BaseJob.cs index 5745801..9dcdc96 100644 --- a/src/producer/Jobs/BaseJob.cs +++ b/src/producer/Jobs/BaseJob.cs @@ -1,4 +1,4 @@ -namespace Scraper.Jobs; +namespace Producer.Jobs; public abstract class BaseJob(ICrawlerProvider crawlerProvider) : IJob { diff --git a/src/producer/Jobs/ICrawlerJob.cs b/src/producer/Jobs/ICrawlerJob.cs index 7b1429b..bd857ba 100644 --- a/src/producer/Jobs/ICrawlerJob.cs +++ b/src/producer/Jobs/ICrawlerJob.cs @@ -1,4 +1,4 @@ -namespace Scraper.Jobs; +namespace Producer.Jobs; public interface ICrawlerJob : IJob where TCrawler : ICrawler diff --git a/src/producer/Jobs/IPJob.cs b/src/producer/Jobs/IPJob.cs index 1619caf..3bc93f3 100644 --- a/src/producer/Jobs/IPJob.cs +++ b/src/producer/Jobs/IPJob.cs @@ -1,4 +1,4 @@ -namespace Scraper.Jobs; +namespace Producer.Jobs; [DisallowConcurrentExecution] public class IPJob(IIpService ipService) : IJob diff --git a/src/producer/Jobs/PublisherJob.cs b/src/producer/Jobs/PublisherJob.cs index 83c285f..e6ca3ad 100644 --- a/src/producer/Jobs/PublisherJob.cs +++ b/src/producer/Jobs/PublisherJob.cs @@ -1,4 +1,4 @@ -namespace Scraper.Jobs; +namespace Producer.Jobs; [DisallowConcurrentExecution] public class PublisherJob(IMessagePublisher publisher, IDataStorage storage, ILogger logger) : IJob diff --git a/src/producer/Jobs/SyncDmmJob.cs b/src/producer/Jobs/SyncDmmJob.cs index fbba396..6b57a74 100644 --- a/src/producer/Jobs/SyncDmmJob.cs +++ b/src/producer/Jobs/SyncDmmJob.cs @@ -1,4 +1,6 @@ -namespace Scraper.Jobs; +using DebridMediaManagerCrawler = Producer.Crawlers.Sites.DebridMediaManagerCrawler; + +namespace Producer.Jobs; [DisallowConcurrentExecution] public class SyncDmmJob(ICrawlerProvider crawlerProvider) : BaseJob(crawlerProvider) diff --git a/src/producer/Jobs/SyncEzTvJob.cs b/src/producer/Jobs/SyncEzTvJob.cs index c8d7bec..dde73d5 100644 --- a/src/producer/Jobs/SyncEzTvJob.cs +++ b/src/producer/Jobs/SyncEzTvJob.cs @@ -1,4 +1,6 @@ -namespace Scraper.Jobs; +using Producer.Crawlers.Sites; + +namespace Producer.Jobs; [DisallowConcurrentExecution] public class SyncEzTvJob(ICrawlerProvider crawlerProvider) : BaseJob(crawlerProvider) diff --git a/src/producer/Jobs/SyncTgxJob.cs b/src/producer/Jobs/SyncTgxJob.cs index 60dd908..4c4fac9 100644 --- a/src/producer/Jobs/SyncTgxJob.cs +++ b/src/producer/Jobs/SyncTgxJob.cs @@ -1,4 +1,6 @@ -namespace Scraper.Jobs; +using TgxCrawler = Producer.Crawlers.Sites.TgxCrawler; + +namespace Producer.Jobs; [DisallowConcurrentExecution] public class SyncTgxJob(ICrawlerProvider crawlerProvider) : BaseJob(crawlerProvider) diff --git a/src/producer/Jobs/SyncTpbJob.cs b/src/producer/Jobs/SyncTpbJob.cs index 47d80c0..db64032 100644 --- a/src/producer/Jobs/SyncTpbJob.cs +++ b/src/producer/Jobs/SyncTpbJob.cs @@ -1,4 +1,6 @@ -namespace Scraper.Jobs; +using Producer.Crawlers.Sites; + +namespace Producer.Jobs; [DisallowConcurrentExecution] public class SyncTpbJob(ICrawlerProvider crawlerProvider) : BaseJob(crawlerProvider) diff --git a/src/producer/Jobs/SyncYtsJob.cs b/src/producer/Jobs/SyncYtsJob.cs index 2c8fc54..2b4fa63 100644 --- a/src/producer/Jobs/SyncYtsJob.cs +++ b/src/producer/Jobs/SyncYtsJob.cs @@ -1,4 +1,6 @@ -namespace Scraper.Jobs; +using Producer.Crawlers.Sites; + +namespace Producer.Jobs; [DisallowConcurrentExecution] public class SyncYtsJob(ICrawlerProvider crawlerProvider) : BaseJob(crawlerProvider) diff --git 
a/src/producer/Models/GithubConfiguration.cs b/src/producer/Models/GithubConfiguration.cs index 26d67a7..429b8d0 100644 --- a/src/producer/Models/GithubConfiguration.cs +++ b/src/producer/Models/GithubConfiguration.cs @@ -1,4 +1,4 @@ -namespace Scraper.Models; +namespace Producer.Models; public class GithubConfiguration { diff --git a/src/producer/Models/RabbitMqConfiguration.cs b/src/producer/Models/RabbitMqConfiguration.cs index a7922d2..4b86073 100644 --- a/src/producer/Models/RabbitMqConfiguration.cs +++ b/src/producer/Models/RabbitMqConfiguration.cs @@ -1,4 +1,4 @@ -namespace Scraper.Models; +namespace Producer.Models; public class RabbitMqConfiguration { @@ -10,4 +10,7 @@ public class RabbitMqConfiguration public string? Password { get; set; } public string? QueueName { get; set; } public bool Durable { get; set; } + public int MaxQueueSize { get; set; } = 1000; + public int MaxPublishBatchSize { get; set; } = 100; + public int PublishIntervalInSeconds { get; set; } = 10; } \ No newline at end of file diff --git a/src/producer/Models/Results.cs b/src/producer/Models/Results.cs index 6128049..00944f7 100644 --- a/src/producer/Models/Results.cs +++ b/src/producer/Models/Results.cs @@ -1,4 +1,4 @@ -namespace Scraper.Models; +namespace Producer.Models; public record InsertTorrentResult(bool Success, int InsertedCount = 0, string? ErrorMessage = null); public record UpdatedTorrentResult(bool Success, int UpdatedCount = 0, string? ErrorMessage = null); diff --git a/src/producer/Models/ScrapeConfiguration.cs b/src/producer/Models/ScrapeConfiguration.cs index 892f2ce..716e32a 100644 --- a/src/producer/Models/ScrapeConfiguration.cs +++ b/src/producer/Models/ScrapeConfiguration.cs @@ -1,4 +1,4 @@ -namespace Scraper.Models; +namespace Producer.Models; public class ScrapeConfiguration { diff --git a/src/producer/Models/Scraper.cs b/src/producer/Models/Scraper.cs index 2d2084b..f44bf1b 100644 --- a/src/producer/Models/Scraper.cs +++ b/src/producer/Models/Scraper.cs @@ -1,4 +1,4 @@ -namespace Scraper.Models; +namespace Producer.Models; public class Scraper { diff --git a/src/producer/Models/Torrent.cs b/src/producer/Models/Torrent.cs index fd36256..74230f3 100644 --- a/src/producer/Models/Torrent.cs +++ b/src/producer/Models/Torrent.cs @@ -1,4 +1,4 @@ -namespace Scraper.Models; +namespace Producer.Models; // Torrent represents a crawled torrent from one of our // supported sources.
diff --git a/src/producer/Program.cs b/src/producer/Program.cs index 6b7b151..5102b75 100644 --- a/src/producer/Program.cs +++ b/src/producer/Program.cs @@ -1,4 +1,6 @@ -var builder = WebApplication.CreateBuilder(args); +using Producer.Extensions; + +var builder = WebApplication.CreateBuilder(args); builder.Configuration .AddScrapeConfiguration(); diff --git a/src/producer/Services/DapperDataStorage.cs b/src/producer/Services/DapperDataStorage.cs index 4da2671..5fe352e 100644 --- a/src/producer/Services/DapperDataStorage.cs +++ b/src/producer/Services/DapperDataStorage.cs @@ -1,6 +1,6 @@ -namespace Scraper.Services; +namespace Producer.Services; -public class DapperDataStorage(ScrapeConfiguration configuration, ILogger logger) : IDataStorage +public class DapperDataStorage(ScrapeConfiguration configuration, RabbitMqConfiguration rabbitConfig, ILogger logger) : IDataStorage { private const string InsertTorrentSql = """ @@ -65,7 +65,7 @@ public class DapperDataStorage(ScrapeConfiguration configuration, ILogger(GetMovieAndSeriesTorrentsNotProcessedSql); - return torrents.ToList(); + return torrents.Take(rabbitConfig.MaxPublishBatchSize).ToList(); } catch (Exception e) { diff --git a/src/producer/Services/IpService.cs b/src/producer/Services/IpService.cs index 241ca24..ceada0b 100644 --- a/src/producer/Services/IpService.cs +++ b/src/producer/Services/IpService.cs @@ -1,4 +1,4 @@ -namespace Scraper.Services; +namespace Producer.Services; public class IpService(ILogger logger, IHttpClientFactory httpClientFactory) : IIpService { diff --git a/src/producer/Services/TorrentPublisher.cs b/src/producer/Services/TorrentPublisher.cs index c087fc0..f0697ff 100644 --- a/src/producer/Services/TorrentPublisher.cs +++ b/src/producer/Services/TorrentPublisher.cs @@ -1,4 +1,4 @@ -namespace Scraper.Services; +namespace Producer.Services; public class TorrentPublisher(ISendEndpointProvider sendEndpointProvider, RabbitMqConfiguration configuration) : IMessagePublisher {
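Note on the new settings: the RabbitMqConfiguration__* entries added to env/producer.env reach LoadRabbitMQConfiguration through the standard .NET configuration binder, which treats the double underscore in an environment variable name as the ":" section separator. Below is a minimal, self-contained sketch of that binding. It is not part of the patch; it assumes the RabbitMqConfiguration class from this commit, a section name of "RabbitMqConfiguration" (mirroring the env var prefix), the Microsoft.Extensions.Configuration, .Binder and .EnvironmentVariables packages, and purely illustrative values.

using System;
using System.Collections.Generic;
using Microsoft.Extensions.Configuration;
using Producer.Models; // assumed: the RabbitMqConfiguration type introduced in this patch

// In-memory values stand in for the producer.env entries; "__" in an env var name
// (e.g. RabbitMqConfiguration__MaxPublishBatchSize) is mapped to ":" by
// AddEnvironmentVariables(), so both sources bind onto the same section.
var configuration = new ConfigurationBuilder()
    .AddInMemoryCollection(new Dictionary<string, string?>
    {
        ["RabbitMqConfiguration:Host"] = "rabbitmq",
        ["RabbitMqConfiguration:QueueName"] = "ingested",
        ["RabbitMqConfiguration:MaxQueueSize"] = "1000",
        ["RabbitMqConfiguration:MaxPublishBatchSize"] = "100",
        ["RabbitMqConfiguration:PublishIntervalInSeconds"] = "10",
    })
    .AddEnvironmentVariables() // real deployments pick up the env/producer.env values here
    .Build();

// Same call shape as LoadRabbitMQConfiguration in ServiceCollectionExtensions.
var rabbitConfig = configuration.GetSection("RabbitMqConfiguration").Get<RabbitMqConfiguration>();
Console.WriteLine($"batch={rabbitConfig?.MaxPublishBatchSize} interval={rabbitConfig?.PublishIntervalInSeconds}s maxQueue={rabbitConfig?.MaxQueueSize}");

With these values bound, DapperDataStorage caps each fetch at MaxPublishBatchSize and the PublisherJob trigger fires every PublishIntervalInSeconds; enforcement of MaxQueueSize is still outstanding, as the commit message notes.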