Files
knightcrawler/src/producer/Jobs/SyncTpbJob.cs
iPromKnight ab17ef81be Big rewrite - distributed consumers for ingestion/scraping (scalable) - single producer written in C#.
Changed from page scraping to RSS XML scraping.
Includes RealDebridManager hashlist decoding (requires a GitHub read-only PAT, as requests must be authenticated). This allows ingestion of 200k+ entries in a few hours.
Simplifies a lot of torrentio to deal with the new data.
2024-02-01 16:38:45 +00:00
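The hashlist decoding mentioned above relies on authenticated GitHub requests. As a rough illustration only (the endpoint, repository path, and environment variable name are placeholders, and the actual RealDebridManager decoding logic is not shown), an authenticated fetch with a read-only PAT in C# could look like this:

using System;
using System.Net.Http;
using System.Net.Http.Headers;

// Sketch only: <owner>/<repo>/<hashlist-path> and GITHUB_PAT are placeholders, not values from the repo.
using var http = new HttpClient();

// GitHub rejects API requests that lack a User-Agent header.
http.DefaultRequestHeaders.UserAgent.ParseAdd("knightcrawler-producer");

// Ask for the raw file body instead of the JSON metadata wrapper.
http.DefaultRequestHeaders.Accept.ParseAdd("application/vnd.github.raw+json");

// Authenticate with the read-only personal access token.
var pat = Environment.GetEnvironmentVariable("GITHUB_PAT")
          ?? throw new InvalidOperationException("GITHUB_PAT is not set");
http.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", pat);

var response = await http.GetAsync("https://api.github.com/repos/<owner>/<repo>/contents/<hashlist-path>");
response.EnsureSuccessStatusCode();

var payload = await response.Content.ReadAsStringAsync();
Console.WriteLine($"Fetched {payload.Length} characters of hashlist data");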

10 lines
424 B
C#

namespace Scraper.Jobs;
[DisallowConcurrentExecution]
public class SyncTpbJob(ICrawlerProvider crawlerProvider) : BaseJob(crawlerProvider)
{
    private const string JobName = nameof(TpbCrawler);
    public static readonly JobKey Key = new(JobName, nameof(Crawlers));
    public static readonly TriggerKey Trigger = new($"{JobName}-trigger", nameof(Crawlers));
    protected override string Crawler => nameof(TpbCrawler);
}
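
SyncTpbJob exposes its Quartz JobKey and TriggerKey as static fields, so scheduling presumably happens elsewhere in the producer. A minimal sketch of such wiring, assuming the standard Quartz hosted-service setup (the cron expression and host bootstrap are illustrative, not taken from the repository):

using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Quartz;
using Scraper.Jobs;

var builder = Host.CreateApplicationBuilder(args);

// ICrawlerProvider (and whatever else BaseJob needs) must be registered so the
// DI-backed job factory can construct SyncTpbJob; that registration is omitted here.
builder.Services.AddQuartz(q =>
{
    // Register the job under its static key and attach its named trigger.
    q.AddJob<SyncTpbJob>(o => o.WithIdentity(SyncTpbJob.Key));
    q.AddTrigger(t => t
        .WithIdentity(SyncTpbJob.Trigger)
        .ForJob(SyncTpbJob.Key)
        .WithCronSchedule("0 0 * * * ?")); // hourly; placeholder schedule
});
builder.Services.AddQuartzHostedService();

await builder.Build().RunAsync();

The [DisallowConcurrentExecution] attribute on the job ensures Quartz never runs two TPB syncs at the same time, even if a trigger fires while a previous run is still in flight.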