[scraper] reduces concurrent entry processing limit

This commit is contained in:
TheBeastLT
2020-05-02 22:20:19 +02:00
parent 452f29896c
commit 9123c1ffd8
5 changed files with 6 additions and 6 deletions

View File

@@ -11,7 +11,7 @@ const NAME = 'RARBG';
 const SEARCH_OPTIONS = { limit: 100, sort: 'seeders', format: 'json_extended', ranked: 0 };
 const limiter = new Bottleneck({ maxConcurrent: 1, minTime: 2500 });
-const entryLimiter = new Bottleneck({ maxConcurrent: 40 });
+const entryLimiter = new Bottleneck({ maxConcurrent: 10 });
async function scrape() {
const scrapeStart = moment();
@@ -43,7 +43,6 @@ async function scrapeLatestTorrents() {
rarbg.CATEGORY.MOVIES_X264_1080P,
rarbg.CATEGORY.MOVIES_X264_720P,
rarbg.CATEGORY.MOVIES_X264_3D,
rarbg.CATEGORY.MOVIES_FULL_BD,
rarbg.CATEGORY.MOVIES_BD_REMUX,
rarbg.CATEGORY.TV_EPISODES,
rarbg.CATEGORY.TV_UHD_EPISODES,