diff --git a/scraper/lib/promises.js b/scraper/lib/promises.js
new file mode 100644
index 0000000..0fa8c73
--- /dev/null
+++ b/scraper/lib/promises.js
@@ -0,0 +1,29 @@
+/**
+ * Execute promise-returning functions in sequence, one after another.
+ */
+async function sequence(promises) {
+  return promises.reduce((promise, func) =>
+      promise.then(result => func().then(Array.prototype.concat.bind(result))), Promise.resolve([]));
+}
+
+/**
+ * Return the first resolved promise as the result.
+ */
+function first(promises) {
+  return Promise.all(promises.map((p) => {
+    // If a request fails, count that as a resolution so it will keep
+    // waiting for other possible successes. If a request succeeds,
+    // treat it as a rejection so Promise.all immediately bails out.
+    return p.then(
+      (val) => Promise.reject(val),
+      (err) => Promise.resolve(err)
+    );
+  })).then(
+    // If '.all' resolved, we've just got an array of errors.
+    (errors) => Promise.reject(errors),
+    // If '.all' rejected, we've got the result we wanted.
+    (val) => Promise.resolve(val)
+  );
+}
+
+module.exports = { sequence, first };
\ No newline at end of file
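A minimal usage sketch of the two new helpers (the values are made up for illustration; note that sequence expects an array of promise-returning functions, not promises, despite its parameter name):

    const Promises = require('./scraper/lib/promises');

    // sequence() runs the given functions one after another and
    // concatenates their resolved values into a single array.
    Promises.sequence([
      () => Promise.resolve([1, 2]),
      () => Promise.resolve([3])
    ]).then(results => console.log(results)); // [1, 2, 3]

    // first() resolves with whichever input promise resolves first and
    // only rejects, with an array of all the errors, if every input rejects.
    Promises.first([
      Promise.reject(new Error('proxy down')),
      new Promise(resolve => setTimeout(() => resolve('page body'), 10))
    ]).then(body => console.log(body)); // 'page body'

Callers of first should therefore expect an array, not a single error, in their catch handler.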
diff --git a/scraper/scrapers/1337x/1337x_api.js b/scraper/scrapers/1337x/1337x_api.js
index 62db8ec..61a544d 100644
--- a/scraper/scrapers/1337x/1337x_api.js
+++ b/scraper/scrapers/1337x/1337x_api.js
@@ -2,6 +2,7 @@ const cheerio = require('cheerio');
 const needle = require('needle');
 const Sugar = require('sugar-date');
 const decode = require('magnet-uri');
+const Promises = require('../../lib/promises');
 
 const defaultProxies = [
   'https://1337x.to'
@@ -27,7 +28,7 @@ function torrent(torrentId, config = {}, retries = 2) {
   const proxyList = config.proxyList || defaultProxies;
   const slug = torrentId.startsWith('/torrent/') ? torrentId.replace('/torrent/', '') : torrentId;
 
-  return raceFirstSuccessful(proxyList
+  return Promises.first(proxyList
       .map((proxyUrl) => singleRequest(`${proxyUrl}/torrent/${slug}`, config)))
       .then((body) => parseTorrentPage(body))
       .then((torrent) => ({ torrentId: slug, ...torrent }))
@@ -41,7 +42,7 @@ function search(keyword, config = {}, retries = 2) {
   const proxyList = config.proxyList || defaultProxies;
   const page = config.page || 1;
 
-  return raceFirstSuccessful(proxyList
+  return Promises.first(proxyList
       .map((proxyUrl) => singleRequest(`${proxyUrl}/search/${keyword}/${page}/`, config)))
       .then((body) => parseTableBody(body))
       .catch((err) => search(keyword, config, retries - 1));
@@ -55,7 +56,7 @@ function browse(config = {}, retries = 2) {
   const proxyList = config.proxyList || defaultProxies;
   const page = config.page || 1;
   const category = config.category;
 
-  return raceFirstSuccessful(proxyList
+  return Promises.first(proxyList
       .map((proxyUrl) => singleRequest(`${proxyUrl}/cat/${category}/${page}/`, config)))
       .then((body) => parseTableBody(body))
       .catch((err) => browse(config, retries - 1));
@@ -154,21 +155,4 @@ function parseSize(sizeText) {
   return Math.floor(parseFloat(sizeText) * scale);
 }
 
-function raceFirstSuccessful(promises) {
-  return Promise.all(promises.map((p) => {
-    // If a request fails, count that as a resolution so it will keep
-    // waiting for other possible successes. If a request succeeds,
-    // treat it as a rejection so Promise.all immediately bails out.
-    return p.then(
-      (val) => Promise.reject(val),
-      (err) => Promise.resolve(err)
-    );
-  })).then(
-    // If '.all' resolved, we've just got an array of errors.
-    (errors) => Promise.reject(errors),
-    // If '.all' rejected, we've got the result we wanted.
-    (val) => Promise.resolve(val)
-  );
-}
-
 module.exports = { torrent, search, browse, Categories };
diff --git a/scraper/scrapers/1337x/1337x_scraper.js b/scraper/scrapers/1337x/1337x_scraper.js
index 07162ab..a3a7f52 100644
--- a/scraper/scrapers/1337x/1337x_scraper.js
+++ b/scraper/scrapers/1337x/1337x_scraper.js
@@ -3,15 +3,11 @@ const Bottleneck = require('bottleneck');
 const leetx = require('./1337x_api');
 const { Type } = require('../../lib/types');
 const repository = require('../../lib/repository');
-const {
-  createTorrentEntry,
-  createSkipTorrentEntry,
-  getStoredTorrentEntry,
-  updateTorrentSeeders
-} = require('../../lib/torrentEntries');
+const Promises = require('../../lib/promises');
+const { createTorrentEntry, getStoredTorrentEntry, updateTorrentSeeders } = require('../../lib/torrentEntries');
 
 const NAME = '1337x';
-const UNTIL_PAGE = 1;
+const UNTIL_PAGE = 10;
 
 const TYPE_MAPPING = typeMapping();
 const limiter = new Bottleneck({ maxConcurrent: 40 });
@@ -21,17 +17,15 @@ async function scrape() {
   const lastScrape = await repository.getProvider({ name: NAME });
   console.log(`[${scrapeStart}] starting ${NAME} scrape...`);
 
-  const latestTorrents = await getLatestTorrents();
-  return Promise.all(latestTorrents.map(torrent => limiter.schedule(() => processTorrentRecord(torrent))))
+  return scrapeLatestTorrents()
       .then(() => {
         lastScrape.lastScraped = scrapeStart;
-        lastScrape.lastScrapedId = latestTorrents.length && latestTorrents[latestTorrents.length - 1].torrentId;
         return repository.updateProvider(lastScrape);
       })
       .then(() => console.log(`[${moment()}] finished ${NAME} scrape`));
 }
 
-async function getLatestTorrents() {
+async function scrapeLatestTorrents() {
   const allowedCategories = [
     leetx.Categories.MOVIE,
     leetx.Categories.TV,
@@ -39,32 +33,36 @@ async function scrapeLatestTorrents() {
     leetx.Categories.ANIME,
     leetx.Categories.DOCUMENTARIES
   ];
 
-  return Promise.all(allowedCategories.map(category => getLatestTorrentsForCategory(category)))
+  return Promises.sequence(allowedCategories.map(category => () => scrapeLatestTorrentsForCategory(category)))
       .then(entries => entries.reduce((a, b) => a.concat(b), []));
 }
 
-async function getLatestTorrentsForCategory(category, page = 1) {
-  return leetx.browse(({ category: category, page: page }))
-      .then(torrents => torrents.length && page < UNTIL_PAGE
-          ? getLatestTorrents(category, page + 1).then(nextTorrents => torrents.concat(nextTorrents))
-          : torrents)
-      .catch(() => []);
+async function scrapeLatestTorrentsForCategory(category, page = 1) {
+  console.log(`Scraping ${NAME} ${category} category page ${page}`);
+  return leetx.browse(({ category, page }))
+      .then(torrents => Promise.all(torrents.map(torrent => limiter.schedule(() => processTorrentRecord(torrent)))))
+      .then(resolved => resolved.length > 0 && page < UNTIL_PAGE
+          ? scrapeLatestTorrentsForCategory(category, page + 1)
+          : Promise.resolve())
+      .catch(error => {
+        console.warn(`Failed ${NAME} scraping for [${page}] ${category} due to: `, error);
+        return Promise.resolve();
+      });
 }
 
 async function processTorrentRecord(record) {
-  if (await getStoredTorrentEntry(record)) {
-    return updateTorrentSeeders(record);
-  }
-
   const torrentFound = await leetx.torrent(record.torrentId).catch(() => undefined);
 
   if (!torrentFound || !TYPE_MAPPING[torrentFound.category]) {
-    return createSkipTorrentEntry(record);
+    return Promise.resolve('Invalid torrent record');
   }
   if (isNaN(torrentFound.uploadDate)) {
     console.warn(`Incorrect upload date for [${torrentFound.infoHash}] ${torrentFound.name}`);
     return;
   }
+  if (await getStoredTorrentEntry(record)) {
+    return updateTorrentSeeders(record);
+  }
 
   const torrent = {
     infoHash: torrentFound.infoHash,
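The scrapers below all move to the same control flow: categories are scraped sequentially via Promises.sequence, and within a category each page is fully processed before the next page is requested. A condensed sketch of that per-category flow, with hypothetical names (api stands for any of the per-site API modules, and processTorrentRecord and UNTIL_PAGE are the scraper's own):

    // Process one page, then recurse to the next page until a page yields
    // no results or UNTIL_PAGE is reached. Errors end the category's run
    // without failing the overall scrape.
    function scrapeCategory(api, category, page = 1) {
      return api.browse({ category, page })
          .then(torrents => Promise.all(torrents.map(t => processTorrentRecord(t))))
          .then(resolved => resolved.length > 0 && page < UNTIL_PAGE
              ? scrapeCategory(api, category, page + 1)
              : Promise.resolve())
          .catch(() => Promise.resolve());
    }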
diff --git a/scraper/scrapers/kickass/kickass_api.js b/scraper/scrapers/kickass/kickass_api.js
index 84ec165..ac06d26 100644
--- a/scraper/scrapers/kickass/kickass_api.js
+++ b/scraper/scrapers/kickass/kickass_api.js
@@ -2,6 +2,7 @@ const cheerio = require('cheerio');
 const needle = require('needle');
 const moment = require('moment');
 const decode = require('magnet-uri');
+const Promises = require('../../lib/promises');
 
 const defaultProxies = [
   'https://katcr.co'
@@ -26,7 +27,7 @@ function torrent(torrentId, config = {}, retries = 2) {
   }
   const proxyList = config.proxyList || defaultProxies;
 
-  return raceFirstSuccessful(proxyList
+  return Promises.first(proxyList
       .map((proxyUrl) => singleRequest(`${proxyUrl}/torrent/${torrentId}`, config)))
       .then((body) => parseTorrentPage(body))
       .then((torrent) => ({ torrentId, ...torrent }))
@@ -41,7 +42,7 @@ function search(keyword, config = {}, retries = 2) {
   const page = config.page || 1;
   const category = config.category;
 
-  return raceFirstSuccessful(proxyList
+  return Promises.first(proxyList
       .map((proxyUrl) => singleRequest(`${proxyUrl}/search/${keyword}/${page}/99/${category}`, config)))
       .then((body) => parseTableBody(body))
       .catch((err) => search(keyword, config, retries - 1));
@@ -55,7 +56,7 @@ function browse(config = {}, retries = 2) {
   const page = config.page || 1;
   const category = config.category;
 
-  return raceFirstSuccessful(proxyList
+  return Promises.first(proxyList
       .map((proxyUrl) => singleRequest(`${proxyUrl}/category/${category}/page/${page}`, config)))
       .then((body) => parseTableBody(body))
       .catch((err) => browse(config, retries - 1));
@@ -94,9 +95,12 @@ function parseTableBody(body) {
   $('.table > tbody > tr').each((i, element) => {
     const row = $(element);
 
+    const magnetLink = row.find('a[title="Torrent magnet link"]').attr('href');
     torrents.push({
-      torrentId: row.find('a[class="torrents_table__torrent_title"]').first().attr('href').replace('/torrent/', ''),
       name: row.find('a[class="torrents_table__torrent_title"]').first().children('b').text(),
+      infoHash: decode(magnetLink).infoHash,
+      magnetLink: magnetLink,
+      torrentId: row.find('a[class="torrents_table__torrent_title"]').first().attr('href').replace('/torrent/', ''),
       category: row.find('span[class="torrents_table__upload_info"]').first().children('a').first().attr('href')
           .match(/category\/([^\/]+)/)[1],
       seeders: parseInt(row.find('td[data-title="Seed"]').first().text()),
@@ -167,21 +171,4 @@ function parseSize(sizeText) {
   return Math.floor(parseFloat(sizeText.replace(/[',]/g, '')) * scale);
 }
 
-function raceFirstSuccessful(promises) {
-  return Promise.all(promises.map((p) => {
-    // If a request fails, count that as a resolution so it will keep
-    // waiting for other possible successes. If a request succeeds,
-    // treat it as a rejection so Promise.all immediately bails out.
-    return p.then(
-      (val) => Promise.reject(val),
-      (err) => Promise.resolve(err)
-    );
-  })).then(
-    // If '.all' resolved, we've just got an array of errors.
-    (errors) => Promise.reject(errors),
-    // If '.all' rejected, we've got the result we wanted.
-    (val) => Promise.resolve(val)
-  );
-}
-
 module.exports = { torrent, search, browse, Categories };
diff --git a/scraper/scrapers/kickass/kickass_scraper.js b/scraper/scrapers/kickass/kickass_scraper.js
index a23fb64..8d76fe3 100644
--- a/scraper/scrapers/kickass/kickass_scraper.js
+++ b/scraper/scrapers/kickass/kickass_scraper.js
@@ -3,15 +3,11 @@ const Bottleneck = require('bottleneck');
 const kickass = require('./kickass_api');
 const { Type } = require('../../lib/types');
 const repository = require('../../lib/repository');
-const {
-  createTorrentEntry,
-  createSkipTorrentEntry,
-  getStoredTorrentEntry,
-  updateTorrentSeeders
-} = require('../../lib/torrentEntries');
+const Promises = require('../../lib/promises');
+const { createTorrentEntry, getStoredTorrentEntry, updateTorrentSeeders } = require('../../lib/torrentEntries');
 
 const NAME = 'KickassTorrents';
-const UNTIL_PAGE = 1;
+const UNTIL_PAGE = 10;
 
 const TYPE_MAPPING = typeMapping();
 const limiter = new Bottleneck({ maxConcurrent: 40 });
@@ -21,33 +17,36 @@ async function scrape() {
   const lastScrape = await repository.getProvider({ name: NAME });
   console.log(`[${scrapeStart}] starting ${NAME} scrape...`);
 
-  const latestTorrents = await getLatestTorrents();
-  return Promise.all(latestTorrents.map(torrent => limiter.schedule(() => processTorrentRecord(torrent))))
+  return scrapeLatestTorrents()
      .then(() => {
        lastScrape.lastScraped = scrapeStart;
-        lastScrape.lastScrapedId = latestTorrents.length && latestTorrents[latestTorrents.length - 1].torrentId;
        return repository.updateProvider(lastScrape);
      })
      .then(() => console.log(`[${moment()}] finished ${NAME} scrape`));
 }
 
-async function getLatestTorrents() {
+async function scrapeLatestTorrents() {
   const allowedCategories = [
     kickass.Categories.MOVIE,
     kickass.Categories.TV,
     kickass.Categories.ANIME,
   ];
 
-  return Promise.all(allowedCategories.map(category => getLatestTorrentsForCategory(category)))
+  return Promises.sequence(allowedCategories.map(category => () => scrapeLatestTorrentsForCategory(category)))
      .then(entries => entries.reduce((a, b) => a.concat(b), []));
 }
 
-async function getLatestTorrentsForCategory(category, page = 1) {
+async function scrapeLatestTorrentsForCategory(category, page = 1) {
+  console.log(`Scraping ${NAME} ${category} category page ${page}`);
   return kickass.browse(({ category, page }))
-      .then(torrents => torrents.length && page < UNTIL_PAGE
-          ? getLatestTorrents(category, page + 1).then(nextTorrents => torrents.concat(nextTorrents))
-          : torrents)
-      .catch(() => []);
+      .then(torrents => Promise.all(torrents.map(torrent => limiter.schedule(() => processTorrentRecord(torrent)))))
+      .then(resolved => resolved.length > 0 && page < UNTIL_PAGE
+          ? scrapeLatestTorrentsForCategory(category, page + 1)
+          : Promise.resolve())
+      .catch(error => {
+        console.warn(`Failed ${NAME} scraping for [${page}] ${category} due to: `, error);
+        return Promise.resolve();
+      });
 }
 
 async function processTorrentRecord(record) {
@@ -58,7 +57,7 @@
   const torrentFound = await kickass.torrent(record.torrentId).catch(() => undefined);
 
   if (!torrentFound || !TYPE_MAPPING[torrentFound.category]) {
-    return createSkipTorrentEntry(record);
+    return Promise.resolve('Invalid torrent record');
   }
 
   const torrent = {
@@ -73,7 +72,7 @@
     seeders: torrentFound.seeders,
   };
 
-  return createTorrentEntry(torrent);
+  return createTorrentEntry(torrent).then(() => torrent);
 }
 
 function typeMapping() {
diff --git a/scraper/scrapers/rarbg/rarbg_scraper.js b/scraper/scrapers/rarbg/rarbg_scraper.js
index 5e57827..f30a12a 100644
--- a/scraper/scrapers/rarbg/rarbg_scraper.js
+++ b/scraper/scrapers/rarbg/rarbg_scraper.js
@@ -4,11 +4,8 @@ const rarbg = require('rarbg-api');
 const decode = require('magnet-uri');
 const { Type } = require('../../lib/types');
 const repository = require('../../lib/repository');
-const {
-  createTorrentEntry,
-  getStoredTorrentEntry,
-  updateTorrentSeeders
-} = require('../../lib/torrentEntries');
+const Promises = require('../../lib/promises');
+const { createTorrentEntry, getStoredTorrentEntry, updateTorrentSeeders } = require('../../lib/torrentEntries');
 
 const NAME = 'RARBG';
 
@@ -20,17 +17,15 @@ async function scrape() {
   const lastScrape = await repository.getProvider({ name: NAME });
   console.log(`[${scrapeStart}] starting ${NAME} scrape...`);
 
-  const latestTorrents = await getLatestTorrents();
-  return Promise.all(latestTorrents.map(torrent => entryLimiter.schedule(() => processTorrentRecord(torrent))))
+  return scrapeLatestTorrents()
      .then(() => {
        lastScrape.lastScraped = scrapeStart;
-        lastScrape.lastScrapedId = latestTorrents.length && latestTorrents[latestTorrents.length - 1].torrentId;
        return repository.updateProvider(lastScrape);
      })
      .then(() => console.log(`[${moment()}] finished ${NAME} scrape`));
 }
 
-async function getLatestTorrents() {
+async function scrapeLatestTorrents() {
   const allowedCategories = [
     rarbg.CATEGORY['4K_MOVIES_X264_4k'],
     rarbg.CATEGORY['4K_X265_4k'],
@@ -48,11 +43,13 @@
     rarbg.CATEGORY.TV_HD_EPISODES
   ];
 
-  return Promise.all(allowedCategories.map(category => limiter.schedule(() => getLatestTorrentsForCategory(category))))
+  return Promises.sequence(allowedCategories
+      .map(category => () => limiter.schedule(() => scrapeLatestTorrentsForCategory(category))))
      .then(entries => entries.reduce((a, b) => a.concat(b), []));
 }
 
-async function getLatestTorrentsForCategory(category) {
+async function scrapeLatestTorrentsForCategory(category) {
+  console.log(`Scraping ${NAME} ${category} category`);
   return rarbg.list({ category: category, limit: 100, sort: 'last', format: 'json_extended', ranked: 0 })
      .then(torrents => torrents.map(torrent => ({
        name: torrent.title,
@@ -65,7 +62,11 @@
        uploadDate: new Date(torrent.pubdate),
        imdbId: torrent.episode_info && torrent.episode_info.imdb
      })))
-      .catch((err) => []);
+      .then(torrents => Promise.all(torrents.map(t => entryLimiter.schedule(() => processTorrentRecord(t)))))
+      .catch(error => {
+        console.warn(`Failed ${NAME} scraping for ${category} due to: `, error);
+        return Promise.resolve();
+      });
 }
 
 async function processTorrentRecord(record) {
diff --git a/scraper/scrapers/thepiratebay/thepiratebay_api.js b/scraper/scrapers/thepiratebay/thepiratebay_api.js
index 6d9d6e5..3754a91 100644
--- a/scraper/scrapers/thepiratebay/thepiratebay_api.js
+++ b/scraper/scrapers/thepiratebay/thepiratebay_api.js
@@ -2,6 +2,7 @@ const cheerio = require('cheerio');
 const needle = require('needle');
 const moment = require('moment');
 const decode = require('magnet-uri');
+const Promises = require('../../lib/promises');
 
 const defaultProxies = [
   'https://thepiratebay.org',
@@ -82,7 +83,7 @@ function torrent(torrentId, config = {}, retries = 2) {
   }
   const proxyList = config.proxyList || defaultProxies;
 
-  return raceFirstSuccessful(proxyList
+  return Promises.first(proxyList
      .map((proxyUrl) => singleRequest(`${proxyUrl}/torrent/${torrentId}`, config)))
      .then((body) => parseTorrentPage(body))
      .then((torrent) => ({ torrentId, ...torrent }))
@@ -97,7 +98,7 @@ function search(keyword, config = {}, retries = 2) {
   const page = config.page || 0;
   const category = config.category || 0;
 
-  return raceFirstSuccessful(proxyList
+  return Promises.first(proxyList
      .map((proxyUrl) => singleRequest(`${proxyUrl}/search/${keyword}/${page}/99/${category}`, config)))
      .then((body) => parseBody(body))
      .catch((err) => search(keyword, config, retries - 1));
@@ -111,7 +112,7 @@ function browse(config = {}, retries = 2) {
   const page = config.page || 0;
   const category = config.category || 0;
 
-  return raceFirstSuccessful(proxyList
+  return Promises.first(proxyList
      .map((proxyUrl) => singleRequest(`${proxyUrl}/browse/${category}/${page}`, config)))
      .then((body) => parseBody(body))
      .catch((err) => browse(config, retries - 1));
@@ -123,7 +124,7 @@ function dumps(config = {}, retries = 2) {
   }
   const proxyList = config.proxyList || defaultProxies;
 
-  return raceFirstSuccessful(proxyList
+  return Promises.first(proxyList
      .map((proxyUrl) => singleRequest(`${proxyUrl}${dumpUrl}`, config)
          .then((body) => body.match(/([^<]+<\/a>.+\d)/g)
              .map((group) => ({
@@ -230,21 +231,4 @@ function parseSize(sizeText) {
   return Math.floor(parseFloat(sizeText) * scale);
 }
 
-function raceFirstSuccessful(promises) {
-  return Promise.all(promises.map((p) => {
-    // If a request fails, count that as a resolution so it will keep
-    // waiting for other possible successes. If a request succeeds,
-    // treat it as a rejection so Promise.all immediately bails out.
-    return p.then(
-      (val) => Promise.reject(val),
-      (err) => Promise.resolve(err)
-    );
-  })).then(
-    // If '.all' resolved, we've just got an array of errors.
-    (errors) => Promise.reject(errors),
-    // If '.all' rejected, we've got the result we wanted.
-    (val) => Promise.resolve(val)
-  );
-}
-
 module.exports = { torrent, search, browse, dumps, Categories };
diff --git a/scraper/scrapers/thepiratebay/thepiratebay_scraper.js b/scraper/scrapers/thepiratebay/thepiratebay_scraper.js
index 565e61d..d4b2854 100644
--- a/scraper/scrapers/thepiratebay/thepiratebay_scraper.js
+++ b/scraper/scrapers/thepiratebay/thepiratebay_scraper.js
@@ -3,12 +3,8 @@ const Bottleneck = require('bottleneck');
 const thepiratebay = require('./thepiratebay_api.js');
 const { Type } = require('../../lib/types');
 const repository = require('../../lib/repository');
-const {
-  createTorrentEntry,
-  createSkipTorrentEntry,
-  getStoredTorrentEntry,
-  updateTorrentSeeders
-} = require('../../lib/torrentEntries');
+const Promises = require('../../lib/promises');
+const { createTorrentEntry, getStoredTorrentEntry, updateTorrentSeeders } = require('../../lib/torrentEntries');
 
 const NAME = 'ThePirateBay';
 const UNTIL_PAGE = 20;
@@ -33,27 +29,30 @@ async function scrape() {
   const lastScrape = await repository.getProvider({ name: NAME });
   console.log(`[${scrapeStart}] starting ${NAME} scrape...`);
 
-  const latestTorrents = await getLatestTorrents();
-  return Promise.all(latestTorrents.map(torrent => limiter.schedule(() => processTorrentRecord(torrent))))
+  return scrapeLatestTorrents()
      .then(() => {
        lastScrape.lastScraped = scrapeStart;
-        lastScrape.lastScrapedId = latestTorrents.length && latestTorrents[latestTorrents.length - 1].torrentId;
        return repository.updateProvider(lastScrape);
      })
      .then(() => console.log(`[${moment()}] finished ${NAME} scrape`));
 }
 
-async function getLatestTorrents() {
-  return Promise.all(allowedCategories.map(category => getLatestTorrentsForCategory(category)))
+async function scrapeLatestTorrents() {
+  return Promises.sequence(allowedCategories.map(category => () => scrapeLatestTorrentsForCategory(category)))
      .then(entries => entries.reduce((a, b) => a.concat(b), []));
 }
 
-async function getLatestTorrentsForCategory(category, page = 0) {
+async function scrapeLatestTorrentsForCategory(category, page = 1) {
+  console.log(`Scraping ${NAME} ${category} category page ${page}`);
   return thepiratebay.browse(({ category, page }))
-      .then(torrents => torrents.length && page < UNTIL_PAGE
-          ? getLatestTorrents(category, page + 1).then(nextTorrents => torrents.concat(nextTorrents))
-          : torrents)
-      .catch(() => []);
+      .then(torrents => Promise.all(torrents.map(torrent => limiter.schedule(() => processTorrentRecord(torrent)))))
+      .then(resolved => resolved.length > 0 && page < UNTIL_PAGE
+          ? scrapeLatestTorrentsForCategory(category, page + 1)
+          : Promise.resolve())
+      .catch(error => {
+        console.warn(`Failed ${NAME} scraping for [${page}] ${category} due to: `, error);
+        return Promise.resolve();
+      });
 }
 
 async function processTorrentRecord(record) {
@@ -64,7 +63,7 @@
   const torrentFound = await thepiratebay.torrent(record.torrentId).catch(() => undefined);
 
   if (!torrentFound || !allowedCategories.includes(torrentFound.subcategory)) {
-    return createSkipTorrentEntry(record);
+    return Promise.resolve('Invalid torrent record');
   }
 
   const torrent = {