mirror of
https://github.com/knightcrawler-stremio/knightcrawler.git
synced 2024-12-20 03:29:51 +00:00
Big rewrite - distributed consumers for ingestion / scraping (scalable) - single producer written in C#.
Changed from page scraping to RSS XML scraping. Includes RealDebridManager hashlist decoding (requires a GitHub read-only PAT, as requests must be authenticated) - this allows ingestion of 200k+ entries in a few hours. Simplifies a lot of torrentio to deal with the new data.
This commit is contained in:
22
src/producer/Services/TorrentPublisher.cs
Normal file
22
src/producer/Services/TorrentPublisher.cs
Normal file
@@ -0,0 +1,22 @@
|
||||
namespace Scraper.Services;
|
||||
|
||||
/// <summary>
/// Publishes batches of <see cref="Torrent"/> messages to a RabbitMQ queue
/// resolved from the injected <see cref="RabbitMqConfiguration"/>.
/// </summary>
public class TorrentPublisher(ISendEndpointProvider sendEndpointProvider, RabbitMqConfiguration configuration) : IMessagePublisher
{
    /// <summary>
    /// Sends the given torrents as a single batch to the configured queue.
    /// </summary>
    /// <param name="torrents">Torrents to publish. Must not be null.</param>
    /// <param name="cancellationToken">Token observed while sending the batch.</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="torrents"/> is null.</exception>
    public async Task PublishAsync(IEnumerable<Torrent> torrents, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(torrents);

        var queueAddress = ConstructQueue();

        // NOTE(review): target-typed new here constructs the endpoint address
        // (presumably a Uri) from the "queue:" short-address string.
        var sendEndpoint = await sendEndpointProvider.GetSendEndpoint(new(queueAddress));

        await sendEndpoint.SendBatch(torrents, cancellationToken: cancellationToken);
    }

    // Builds the short-address form "queue:<name>?durable=<true|false>".
    // A single interpolated string replaces the original StringBuilder,
    // which was over-engineered for a fixed-shape, five-part string.
    private string ConstructQueue() =>
        $"queue:{configuration.QueueName}?durable={(configuration.Durable ? "true" : "false")}";
}
|
||||
Reference in New Issue
Block a user