[scraper] reduces concurrent entry processing limit

TheBeastLT committed 2020-05-02 22:20:19 +02:00
parent 452f29896c
commit 9123c1ffd8
5 changed files with 6 additions and 6 deletions


@@ -9,6 +9,7 @@ const NAME = 'EZTV';
 const UNTIL_PAGE = 10;
 const limiter = new Bottleneck({ maxConcurrent: 1 });
+const entryLimiter = new Bottleneck({ maxConcurrent: 10 });
 
 async function scrape() {
   const scrapeStart = moment();
@@ -42,7 +43,7 @@ async function scrapeLatestTorrentsForCategory(page = 1) {
       // return Promises.delay(30000).then(() => scrapeLatestTorrentsForCategory(page))
       return Promise.resolve([]);
     })
-    .then(torrents => Promise.all(torrents.map(torrent => processTorrentRecord(torrent))))
+    .then(torrents => Promise.all(torrents.map(t => entryLimiter.schedule(() => processTorrentRecord(t)))))
     .then(resolved => resolved.length > 0 && page < UNTIL_PAGE
         ? scrapeLatestTorrentsForCategory(page + 1)
         : Promise.resolve());
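
For context, the change routes each entry's work through Bottleneck's schedule() so that Promise.all no longer fans out to every torrent on a page at once. A minimal sketch of the pattern follows; the processAll helper and the stub body of processTorrentRecord are illustrative, not taken from the repo:

const Bottleneck = require('bottleneck');

// At most 10 entries are processed at any moment; extra calls queue.
const entryLimiter = new Bottleneck({ maxConcurrent: 10 });

// Illustrative stand-in for the scraper's per-entry work.
async function processTorrentRecord(torrent) {
  return torrent;
}

// Promise.all still resolves once every entry finishes, but schedule()
// caps how many processTorrentRecord calls run concurrently.
function processAll(torrents) {
  return Promise.all(
      torrents.map(t => entryLimiter.schedule(() => processTorrentRecord(t))));
}

Without the limiter, a page with N torrents would kick off N concurrent processing calls at once; with it, bursts are smoothed to 10 at a time while the overall result of Promise.all is unchanged.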