mirror of
https://github.com/knightcrawler-stremio/knightcrawler.git
synced 2024-12-20 03:29:51 +00:00
Big rewrite - distributed consumers for ingestion / scraping (scalable) - single producer written in C#.
Changed from page scraping to RSS XML scraping. Includes RealDebridManager hashlist decoding (requires a GitHub read-only PAT, as requests must be authenticated) - this allows ingestion of 200k+ entries in a few hours. Simplifies a lot of torrentio to deal with the new data.
This commit is contained in:
10
src/producer/Jobs/SyncTpbJob.cs
Normal file
10
src/producer/Jobs/SyncTpbJob.cs
Normal file
@@ -0,0 +1,10 @@
|
||||
namespace Scraper.Jobs;

/// <summary>
/// Quartz job that triggers the TPB crawler. The
/// <see cref="DisallowConcurrentExecutionAttribute"/> stops the scheduler
/// from starting a new run while a previous one is still in flight.
/// </summary>
[DisallowConcurrentExecution]
public class SyncTpbJob(ICrawlerProvider crawlerProvider) : BaseJob(crawlerProvider)
{
    // Single source of truth for the job's identity: derived from the crawler
    // type name so the job key, trigger key, and crawler lookup stay in sync.
    private const string JobName = nameof(TpbCrawler);

    /// <summary>Scheduler key used to register and look up this job.</summary>
    public static readonly JobKey Key = new JobKey(JobName, nameof(Crawlers));

    /// <summary>Scheduler key for the trigger that fires this job.</summary>
    public static readonly TriggerKey Trigger = new TriggerKey($"{JobName}-trigger", nameof(Crawlers));

    // Consumed by BaseJob (defined elsewhere); presumably used to resolve the
    // concrete crawler from the provider by name — same value as JobName.
    protected override string Crawler => JobName;
}
|
||||
Reference in New Issue
Block a user