[scraper] reduces concurrent entry processing limit

This commit is contained in:
TheBeastLT
2020-05-02 22:20:19 +02:00
parent 452f29896c
commit 9123c1ffd8
5 changed files with 6 additions and 6 deletions

View File

@@ -10,7 +10,7 @@ const NAME = 'KickassTorrents';
const UNTIL_PAGE = 10;
const TYPE_MAPPING = typeMapping();
-const limiter = new Bottleneck({ maxConcurrent: 40 });
+const limiter = new Bottleneck({ maxConcurrent: 10 });
async function scrape() {
const scrapeStart = moment();