diff --git a/scraper/scrapers/1337x/1337x_scraper.js b/scraper/scrapers/1337x/1337x_scraper.js
index f7cd29a..10bb40a 100644
--- a/scraper/scrapers/1337x/1337x_scraper.js
+++ b/scraper/scrapers/1337x/1337x_scraper.js
@@ -10,7 +10,7 @@ const NAME = '1337x';
 const UNTIL_PAGE = 10;
 const TYPE_MAPPING = typeMapping();
 
-const limiter = new Bottleneck({ maxConcurrent: 40 });
+const limiter = new Bottleneck({ maxConcurrent: 10 });
 
 async function scrape() {
   const scrapeStart = moment();
diff --git a/scraper/scrapers/eztv/eztv_scraper.js b/scraper/scrapers/eztv/eztv_scraper.js
index ebe7b0b..527bfdf 100644
--- a/scraper/scrapers/eztv/eztv_scraper.js
+++ b/scraper/scrapers/eztv/eztv_scraper.js
@@ -9,6 +9,7 @@ const NAME = 'EZTV';
 const UNTIL_PAGE = 10;
 
 const limiter = new Bottleneck({ maxConcurrent: 1 });
+const entryLimiter = new Bottleneck({ maxConcurrent: 10 });
 
 async function scrape() {
   const scrapeStart = moment();
@@ -42,7 +43,7 @@ async function scrapeLatestTorrentsForCategory(page = 1) {
         // return Promises.delay(30000).then(() => scrapeLatestTorrentsForCategory(page))
         return Promise.resolve([]);
       })
-      .then(torrents => Promise.all(torrents.map(torrent => processTorrentRecord(torrent))))
+      .then(torrents => Promise.all(torrents.map(t => entryLimiter.schedule(() => processTorrentRecord(t)))))
       .then(resolved => resolved.length > 0 && page < UNTIL_PAGE
           ? scrapeLatestTorrentsForCategory(page + 1)
           : Promise.resolve());
diff --git a/scraper/scrapers/kickass/kickass_scraper.js b/scraper/scrapers/kickass/kickass_scraper.js
index 2273535..2df1308 100644
--- a/scraper/scrapers/kickass/kickass_scraper.js
+++ b/scraper/scrapers/kickass/kickass_scraper.js
@@ -10,7 +10,7 @@ const NAME = 'KickassTorrents';
 const UNTIL_PAGE = 10;
 const TYPE_MAPPING = typeMapping();
 
-const limiter = new Bottleneck({ maxConcurrent: 40 });
+const limiter = new Bottleneck({ maxConcurrent: 10 });
 
 async function scrape() {
   const scrapeStart = moment();
diff --git a/scraper/scrapers/rarbg/rarbg_scraper.js b/scraper/scrapers/rarbg/rarbg_scraper.js
index 5a8691c..e5346a8 100644
--- a/scraper/scrapers/rarbg/rarbg_scraper.js
+++ b/scraper/scrapers/rarbg/rarbg_scraper.js
@@ -11,7 +11,7 @@ const NAME = 'RARBG';
 const SEARCH_OPTIONS = { limit: 100, sort: 'seeders', format: 'json_extended', ranked: 0 };
 
 const limiter = new Bottleneck({ maxConcurrent: 1, minTime: 2500 });
-const entryLimiter = new Bottleneck({ maxConcurrent: 40 });
+const entryLimiter = new Bottleneck({ maxConcurrent: 10 });
 
 async function scrape() {
   const scrapeStart = moment();
@@ -43,7 +43,6 @@ async function scrapeLatestTorrents() {
     rarbg.CATEGORY.MOVIES_X264_1080P,
     rarbg.CATEGORY.MOVIES_X264_720P,
     rarbg.CATEGORY.MOVIES_X264_3D,
-    rarbg.CATEGORY.MOVIES_FULL_BD,
     rarbg.CATEGORY.MOVIES_BD_REMUX,
     rarbg.CATEGORY.TV_EPISODES,
     rarbg.CATEGORY.TV_UHD_EPISODES,
diff --git a/scraper/scrapers/thepiratebay/thepiratebay_scraper.js b/scraper/scrapers/thepiratebay/thepiratebay_scraper.js
index 6995973..6a3dc9c 100644
--- a/scraper/scrapers/thepiratebay/thepiratebay_scraper.js
+++ b/scraper/scrapers/thepiratebay/thepiratebay_scraper.js
@@ -9,7 +9,7 @@ const { createTorrentEntry, getStoredTorrentEntry, updateTorrentSeeders } = requ
 const NAME = 'ThePirateBay';
 const UNTIL_PAGE = 5;
 
-const limiter = new Bottleneck({ maxConcurrent: 40 });
+const limiter = new Bottleneck({ maxConcurrent: 10 });
 
 const allowedCategories = [
   thepiratebay.Categories.VIDEO.MOVIES,
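
For context, a minimal sketch (not part of the patch) of the Bottleneck pattern these changes tune: per-entry work is wrapped in limiter.schedule(), so maxConcurrent caps how many jobs run at once, which is why lowering it from 40 to 10 throttles the scrapers. The processTorrentRecord below is a hypothetical stand-in for the scrapers' real handler.

const Bottleneck = require('bottleneck');

// At most 10 scheduled jobs run at the same time; extra calls queue up.
const entryLimiter = new Bottleneck({ maxConcurrent: 10 });

// Hypothetical stand-in for the scrapers' per-torrent handler.
async function processTorrentRecord(torrent) {
  return torrent.title;
}

// Mirrors the eztv change: each record goes through the limiter instead of
// all processTorrentRecord calls firing at once.
function processAll(torrents) {
  return Promise.all(torrents.map(t => entryLimiter.schedule(() => processTorrentRecord(t))));
}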