Big rewrite: distributed consumers for ingestion/scraping (scalable), with a single producer written in C#.
Switched from page scraping to RSS XML scraping. Includes RealDebridManager hashlist decoding (requires a GitHub read-only PAT, as requests must be authenticated). This allows ingestion of 200k+ entries in a few hours and simplifies much of torrentio's handling of the new data.
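For context, GitHub API requests can be authenticated by sending a personal access token in the Authorization header. A minimal C# sketch (the endpoint URL, environment variable, and User-Agent value are placeholders for illustration, not values from this commit):

```csharp
using System;
using System.Net.Http;
using System.Net.Http.Headers;

// Sketch: authenticate a GitHub request with a read-only PAT.
// URL, env var name, and User-Agent are hypothetical placeholders.
using var client = new HttpClient();
client.DefaultRequestHeaders.UserAgent.ParseAdd("scraper/1.0"); // GitHub requires a User-Agent
client.DefaultRequestHeaders.Authorization =
    new AuthenticationHeaderValue("Bearer", Environment.GetEnvironmentVariable("GITHUB_PAT"));

var response = await client.GetAsync("https://api.github.com/repos/owner/repo/contents/hashlists");
response.EnsureSuccessStatusCode();
var body = await response.Content.ReadAsStringAsync();
```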
src/producer/Models/RabbitMqConfiguration.cs (new file, 13 lines)
@@ -0,0 +1,13 @@
namespace Scraper.Models;

public class RabbitMqConfiguration
{
    public const string SectionName = "RabbitMqConfiguration";
    public const string Filename = "rabbitmq.json";

    public string? Host { get; set; }
    public string? Username { get; set; }
    public string? Password { get; set; }
    public string? QueueName { get; set; }
    public bool Durable { get; set; }
}
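A sketch of how this options class might be loaded at startup, assuming the standard Microsoft.Extensions.Configuration JSON and Binder packages (the wiring itself is not part of this commit):

```csharp
using Microsoft.Extensions.Configuration;
using Scraper.Models;

// Sketch: load rabbitmq.json and bind its "RabbitMqConfiguration"
// section using the constants the class defines.
var configuration = new ConfigurationBuilder()
    .AddJsonFile(RabbitMqConfiguration.Filename, optional: false, reloadOnChange: true)
    .Build();

var rabbitConfig = configuration
    .GetSection(RabbitMqConfiguration.SectionName)
    .Get<RabbitMqConfiguration>();
```

Under this assumption, rabbitmq.json would nest Host, Username, Password, QueueName, and Durable under a "RabbitMqConfiguration" section so that GetSection finds them.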