Big rewrite - distributed consumers for ingestion / scraping (scalable) - single producer written in C#.

Changed from page scraping to RSS XML scraping.
Includes RealDebridManager hashlist decoding (requires a GitHub read-only PAT, as requests must be authenticated) - this allows ingestion of 200k+ entries in a few hours.
Simplifies a lot of torrentio to deal with new data
This commit is contained in:
iPromKnight
2024-02-01 16:38:45 +00:00
parent 6fb4ddcf23
commit ab17ef81be
255 changed files with 18489 additions and 69074 deletions

View File

@@ -0,0 +1,22 @@
namespace Scraper.Services;
/// <summary>
/// Publishes batches of <see cref="Torrent"/> messages to the configured RabbitMQ queue
/// via the injected <see cref="ISendEndpointProvider"/>.
/// </summary>
public class TorrentPublisher(ISendEndpointProvider sendEndpointProvider, RabbitMqConfiguration configuration) : IMessagePublisher
{
    /// <summary>
    /// Sends the supplied torrents to the configured queue as a single batch.
    /// </summary>
    /// <param name="torrents">Torrents to publish; an empty sequence results in an empty batch send.</param>
    /// <param name="cancellationToken">Token observed by the underlying send operation.</param>
    public async Task PublishAsync(IEnumerable<Torrent> torrents, CancellationToken cancellationToken = default)
    {
        var queueAddress = ConstructQueue();
        var sendEndpoint = await sendEndpointProvider.GetSendEndpoint(new(queueAddress));
        await sendEndpoint.SendBatch(torrents, cancellationToken: cancellationToken);
    }

    // Builds the short queue address "queue:<name>?durable=<true|false>".
    // A single interpolated string replaces the original StringBuilder: the value is
    // small and assembled once per call, so StringBuilder only added allocation and noise.
    // The explicit ternary keeps the lowercase "true"/"false" the original produced
    // (bool.ToString() would yield "True"/"False").
    private string ConstructQueue() =>
        $"queue:{configuration.QueueName}?durable={(configuration.Durable ? "true" : "false")}";
}